Batuhan Osman TASKAYA / cpython

Commit 0d441119 authored Nov 14, 2015 by Serhiy Storchaka
Issue #25388: Fixed tokenizer crash when processing undecodable source code
with a null byte.
Parent 806fb254
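
The test added below in Lib/test/test_compile.py is the reference reproduction: it writes undecodable source containing a null byte to a file, runs it with the interpreter, and expects a "Non-UTF-8" error rather than a tokenizer crash. The same check can be sketched as a standalone script (illustrative only, not part of the commit; it assumes an interpreter that already contains this fix and uses only standard-library modules):

    # Sketch: run a file whose bytes cannot be decoded as UTF-8 and which also
    # contains a null byte. With this fix the interpreter reports a SyntaxError
    # mentioning "Non-UTF-8"; before it, the tokenizer could crash.
    import os
    import subprocess
    import sys
    import tempfile

    src = b"#\x00\n#\xfd\n"   # same bytes as the new test: a null byte, then \xfd
    with tempfile.TemporaryDirectory() as tmpd:
        fn = os.path.join(tmpd, "bad.py")
        with open(fn, "wb") as fp:
            fp.write(src)
        proc = subprocess.run([sys.executable, fn],
                              stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    print(b"Non-UTF-8" in proc.stderr)   # expected: True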
Showing 3 changed files with 19 additions and 8 deletions:

    Lib/test/test_compile.py   +10  -0
    Misc/NEWS                   +3  -0
    Parser/tokenizer.c          +6  -8
Lib/test/test_compile.py
@@ -504,6 +504,16 @@ if 1:
             res = script_helper.run_python_until_end(fn)[0]
         self.assertIn(b"Non-UTF-8", res.err)

+    def test_yet_more_evil_still_undecodable(self):
+        # Issue #25388
+        src = b"#\x00\n#\xfd\n"
+        with tempfile.TemporaryDirectory() as tmpd:
+            fn = os.path.join(tmpd, "bad.py")
+            with open(fn, "wb") as fp:
+                fp.write(src)
+            res = script_helper.run_python_until_end(fn)[0]
+        self.assertIn(b"Non-UTF-8", res.err)
+
     @support.cpython_only
     def test_compiler_recursion_limit(self):
         # Expected limit is sys.getrecursionlimit() * the scaling factor
Misc/NEWS
@@ -10,6 +10,9 @@ Release date: tba

 Core and Builtins
 -----------------

+- Issue #25388: Fixed tokenizer crash when processing undecodable source code
+  with a null byte.
+
 - Issue #22995: Default implementation of __reduce__ and __reduce_ex__ now
   rejects builtin types with not defined __new__.
Parser/tokenizer.c
@@ -187,7 +187,8 @@ error_ret(struct tok_state *tok) /* XXX */
     tok->decoding_erred = 1;
     if (tok->fp != NULL && tok->buf != NULL) /* see PyTokenizer_Free */
         PyMem_FREE(tok->buf);
-    tok->buf = NULL;
+    tok->buf = tok->cur = tok->end = tok->inp = tok->start = NULL;
+    tok->done = E_DECODE;
     return NULL;                /* as if it were EOF */
 }
@@ -943,11 +944,6 @@ tok_nextc(struct tok_state *tok)
                 }
                 buflen = PyBytes_GET_SIZE(u);
                 buf = PyBytes_AS_STRING(u);
-                if (!buf) {
-                    Py_DECREF(u);
-                    tok->done = E_DECODE;
-                    return EOF;
-                }
                 newtok = PyMem_MALLOC(buflen+1);
                 strcpy(newtok, buf);
                 Py_DECREF(u);
@@ -989,7 +985,6 @@ tok_nextc(struct tok_state *tok)
                 if (tok->buf != NULL)
                     PyMem_FREE(tok->buf);
                 tok->buf = newtok;
-                tok->line_start = tok->buf;
                 tok->cur = tok->buf;
                 tok->line_start = tok->buf;
                 tok->inp = strchr(tok->buf, '\0');
@@ -1012,7 +1007,8 @@ tok_nextc(struct tok_state *tok)
                 }
                 if (decoding_fgets(tok->buf, (int)(tok->end - tok->buf),
                           tok) == NULL) {
-                    tok->done = E_EOF;
+                    if (!tok->decoding_erred)
+                        tok->done = E_EOF;
                     done = 1;
                 }
                 else {
@@ -1046,6 +1042,8 @@ tok_nextc(struct tok_state *tok)
                     return EOF;
                 }
                 tok->buf = newbuf;
+                tok->cur = tok->buf + cur;
+                tok->line_start = tok->cur;
                 tok->inp = tok->buf + curvalid;
                 tok->end = tok->buf + newsize;
                 tok->start = curstart < 0 ? NULL :