Mbetter / MBetterd · Commits

Commit a959b49b
authored Sep 26, 2025 by Stefy Lanza (nextime / spora)
Chunked upload
parent 48e8d2b8
Showing 3 changed files with 297 additions and 5 deletions
app/templates/upload/zip.html   +124  -1
app/upload/routes.py            +171  -2
config.py                       +2    -2
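The commit replaces the single multipart POST with a two-step protocol: the template slices the selected file into 1 MB chunks, POSTs each one to /upload/chunk, and then calls /upload/finalize so the server reassembles and processes the file. The sketch below replays that protocol from Python, for illustration only: the endpoints and form field names come from the diff, while the base URL, the requests dependency, and an already-authenticated session (the new routes are @login_required) are assumptions.

import math
import os
import time
import uuid

import requests  # assumed client dependency; any HTTP client would do

CHUNK_SIZE = 1024 * 1024                 # 1 MB, matching the template's CHUNK_SIZE
BASE_URL = "http://localhost:5000"       # assumed dev server address

def upload_in_chunks(session: requests.Session, path: str, match_id: str = "") -> dict:
    """Replay the template's chunk/finalize protocol from Python."""
    size = os.path.getsize(path)
    total_chunks = math.ceil(size / CHUNK_SIZE)
    upload_id = f"{int(time.time() * 1000)}_{uuid.uuid4().hex[:9]}"
    file_name = os.path.basename(path)

    with open(path, "rb") as fh:
        for index in range(total_chunks):
            chunk = fh.read(CHUNK_SIZE)
            resp = session.post(
                f"{BASE_URL}/upload/chunk",
                files={"chunk": (file_name, chunk)},
                data={
                    "chunkIndex": index,
                    "totalChunks": total_chunks,
                    "uploadId": upload_id,
                    "fileName": file_name,
                    "matchId": match_id,
                },
            )
            resp.raise_for_status()

    # Once every chunk is stored, ask the server to assemble and process the file.
    final = session.post(
        f"{BASE_URL}/upload/finalize",
        json={"uploadId": upload_id, "fileName": file_name, "matchId": match_id},
    )
    final.raise_for_status()
    return final.json()

Uploading sequentially keeps the client simple; nothing in the diff requires chunks to arrive in order, since the server names each piece by its index.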
app/templates/upload/zip.html

@@ -125,6 +125,129 @@
        color: #0066cc;
    }
</style>
<script>
const CHUNK_SIZE = 1024 * 1024; // 1MB chunks

function uploadFileInChunks(file, matchId) {
    const totalChunks = Math.ceil(file.size / CHUNK_SIZE);
    const uploadId = Date.now() + '_' + Math.random().toString(36).substr(2, 9);
    let currentChunk = 0;

    const progressBar = document.createElement('div');
    progressBar.style.cssText = `
        width: 100%;
        height: 20px;
        background-color: #f0f0f0;
        border-radius: 10px;
        margin: 10px 0;
        overflow: hidden;
    `;

    const progressFill = document.createElement('div');
    progressFill.style.cssText = `
        height: 100%;
        background-color: #007bff;
        width: 0%;
        transition: width 0.3s;
    `;
    progressBar.appendChild(progressFill);

    const form = document.querySelector('form');
    form.style.display = 'none';
    form.parentNode.insertBefore(progressBar, form);

    const statusDiv = document.createElement('div');
    statusDiv.textContent = 'Uploading... 0%';
    progressBar.parentNode.insertBefore(statusDiv, progressBar);

    function uploadChunk() {
        if (currentChunk >= totalChunks) {
            // All chunks uploaded, finalize
            finalizeUpload(uploadId, file.name, matchId);
            return;
        }

        const start = currentChunk * CHUNK_SIZE;
        const end = Math.min(start + CHUNK_SIZE, file.size);
        const chunk = file.slice(start, end);

        const formData = new FormData();
        formData.append('chunk', chunk);
        formData.append('chunkIndex', currentChunk);
        formData.append('totalChunks', totalChunks);
        formData.append('uploadId', uploadId);
        formData.append('fileName', file.name);
        formData.append('matchId', matchId || '');

        fetch('/upload/chunk', {
            method: 'POST',
            body: formData
        })
        .then(response => response.json())
        .then(data => {
            if (data.success) {
                currentChunk++;
                const progress = (currentChunk / totalChunks) * 100;
                progressFill.style.width = progress + '%';
                statusDiv.textContent = `Uploading... ${Math.round(progress)}%`;
                uploadChunk();
            } else {
                throw new Error(data.error || 'Upload failed');
            }
        })
        .catch(error => {
            statusDiv.textContent = 'Upload failed: ' + error.message;
            statusDiv.style.color = 'red';
        });
    }

    uploadChunk();
}

function finalizeUpload(uploadId, fileName, matchId) {
    fetch('/upload/finalize', {
        method: 'POST',
        headers: {
            'Content-Type': 'application/json',
        },
        body: JSON.stringify({
            uploadId: uploadId,
            fileName: fileName,
            matchId: matchId
        })
    })
    .then(response => response.json())
    .then(data => {
        if (data.success) {
            window.location.href = data.redirect || '/';
        } else {
            document.querySelector('div').textContent = 'Finalization failed: ' + (data.error || 'Unknown error');
            document.querySelector('div').style.color = 'red';
        }
    })
    .catch(error => {
        document.querySelector('div').textContent = 'Finalization failed: ' + error.message;
        document.querySelector('div').style.color = 'red';
    });
}

document.addEventListener('DOMContentLoaded', function() {
    const form = document.querySelector('form');
    form.addEventListener('submit', function(e) {
        e.preventDefault();
        const fileInput = document.querySelector('input[type="file"]');
        const file = fileInput.files[0];
        if (!file) {
            alert('Please select a file');
            return;
        }
        const pathParts = window.location.pathname.split('/');
        const matchId = pathParts[pathParts.length - 1] || '';
        uploadFileInChunks(file, matchId);
    });
});
</script>
</head>
<body>
<div class="header">
@@ -171,7 +294,7 @@
            {{ form.zip_file.label }}
            {{ form.zip_file() }}
            <div style="font-size: 0.9rem; color: #666; margin-top: 0.5rem;">
-               <small>Supported format: .zip (Max size: 50MB)</small>
+               <small>Supported format: .zip (Max size: 5GB)</small>
            </div>
        </div>
app/upload/routes.py

 import os
 import logging
+import tempfile
+import shutil
 from flask import request, jsonify, render_template, redirect, url_for, flash, current_app
 from flask_login import login_required, current_user
 from flask_jwt_extended import jwt_required, get_jwt_identity
@@ -603,7 +605,7 @@ def api_upload_zip_stream(match_id):
    filename = request.headers.get('X-Filename', 'streamed_file.zip')

    # Validate file size
-   if total_size > current_app.config.get('MAX_CONTENT_LENGTH', 2 * 1024 * 1024 * 1024):
+   if total_size > current_app.config.get('MAX_CONTENT_LENGTH', 5 * 1024 * 1024 * 1024):
        return jsonify({'error': 'File too large'}), 413

    # Update match status to uploading
@@ -899,4 +901,171 @@ def upload_fixture_zip(fixture_id):
    except Exception as e:
        logger.error(f"Fixture ZIP upload error: {str(e)}")
        flash('Upload processing failed', 'error')
        return redirect(request.referrer or url_for('main.fixtures'))
\ No newline at end of file
@bp.route('/chunk', methods=['POST'])
@login_required
@require_active_user
def upload_chunk():
    """Upload a chunk of a file"""
    try:
        chunk = request.files.get('chunk')
        if not chunk:
            return jsonify({'success': False, 'error': 'No chunk provided'}), 400

        chunk_index = int(request.form.get('chunkIndex', 0))
        total_chunks = int(request.form.get('totalChunks', 1))
        upload_id = request.form.get('uploadId')
        file_name = request.form.get('fileName')
        match_id = request.form.get('matchId')

        if not upload_id or not file_name:
            return jsonify({'success': False, 'error': 'Missing upload parameters'}), 400

        # Create temp directory for this upload
        temp_dir = os.path.join(current_app.config['TEMP_UPLOAD_FOLDER'], upload_id)
        os.makedirs(temp_dir, exist_ok=True)

        # Save chunk
        chunk_path = os.path.join(temp_dir, f'chunk_{chunk_index:06d}')
        chunk.save(chunk_path)

        # Check if all chunks are received
        received_chunks = len([f for f in os.listdir(temp_dir) if f.startswith('chunk_')])
        if received_chunks == total_chunks:
            # All chunks received, mark as ready for finalization
            with open(os.path.join(temp_dir, 'metadata.txt'), 'w') as f:
                f.write(f'{file_name}\n{match_id or ""}\n')

        return jsonify({'success': True}), 200

    except Exception as e:
        logger.error(f"Chunk upload error: {str(e)}")
        return jsonify({'success': False, 'error': str(e)}), 500
@bp.route('/finalize', methods=['POST'])
@login_required
@require_active_user
def finalize_upload():
    """Finalize chunked upload"""
    try:
        data = request.get_json()
        upload_id = data.get('uploadId')
        file_name = data.get('fileName')
        match_id = data.get('matchId')

        if not upload_id or not file_name:
            return jsonify({'success': False, 'error': 'Missing parameters'}), 400

        temp_dir = os.path.join(current_app.config['TEMP_UPLOAD_FOLDER'], upload_id)
        if not os.path.exists(temp_dir):
            return jsonify({'success': False, 'error': 'Upload not found'}), 404

        # Read metadata
        metadata_path = os.path.join(temp_dir, 'metadata.txt')
        if not os.path.exists(metadata_path):
            return jsonify({'success': False, 'error': 'Upload not complete'}), 400

        with open(metadata_path, 'r') as f:
            lines = f.read().strip().split('\n')
            stored_file_name = lines[0]
            stored_match_id = lines[1] if len(lines) > 1 else ''

        # Assemble file
        final_path = os.path.join(temp_dir, secure_filename(file_name))
        with open(final_path, 'wb') as outfile:
            chunk_files = sorted([f for f in os.listdir(temp_dir) if f.startswith('chunk_')])
            for chunk_file in chunk_files:
                chunk_path = os.path.join(temp_dir, chunk_file)
                with open(chunk_path, 'rb') as infile:
                    shutil.copyfileobj(infile, outfile)

        # Now process as normal upload
        file_handler = get_file_upload_handler()

        # Create a file-like object for the assembled file
        class FileLike:
            def __init__(self, path):
                self.path = path
                self.name = file_name

            def save(self, dst):
                shutil.move(self.path, dst)

        mock_file = FileLike(final_path)

        if match_id:
            # ZIP upload for match
            from app.models import Match
            match = Match.query.get_or_404(int(match_id))

            # Check permissions
            if not current_user.is_admin and match.created_by != current_user.id:
                return jsonify({'success': False, 'error': 'Permission denied'}), 403

            # Update match status
            match.zip_upload_status = 'uploading'
            db.session.commit()

            upload_record, error_message = file_handler.process_upload(mock_file, 'zip', current_user.id, int(match_id))

            if error_message:
                match.zip_upload_status = 'failed'
                db.session.commit()
                return jsonify({'success': False, 'error': error_message}), 400

            match.zip_filename = upload_record.filename
            match.zip_sha1sum = upload_record.sha1sum
            match.zip_upload_status = 'completed'
            match.zip_upload_progress = 100.00
            match.set_active()
            db.session.commit()

            # Clean up temp files
            shutil.rmtree(temp_dir)

            flash(f'ZIP file uploaded successfully for Match #{match.match_number}! Match is now active.', 'success')
            return jsonify({'success': True, 'redirect': url_for('main.fixture_detail', fixture_id=match.fixture_id)}), 200
        else:
            # Fixture upload
            upload_record, error_message = file_handler.process_upload(mock_file, 'fixture', current_user.id)

            if error_message:
                return jsonify({'success': False, 'error': error_message}), 400

            # Parse fixture
            fixture_parser = get_fixture_parser()
            success, parse_error, parsed_matches = fixture_parser.parse_fixture_file(upload_record.file_path, upload_record.original_filename, current_user.id)

            if not success:
                return jsonify({'success': False, 'error': parse_error}), 400

            success, save_error, match_ids = fixture_parser.save_matches_to_database(parsed_matches, upload_record.sha1sum)

            if not success:
                return jsonify({'success': False, 'error': save_error}), 500

            # Clean up temp files
            shutil.rmtree(temp_dir)

            flash(f'Successfully uploaded and parsed {len(match_ids)} matches!', 'success')
            return jsonify({'success': True, 'redirect': url_for('main.matches')}), 200

    except Exception as e:
        logger.error(f"Finalize upload error: {str(e)}")
        return jsonify({'success': False, 'error': str(e)}), 500
\ No newline at end of file
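finalize_upload rebuilds the file by lexicographically sorting the chunk_* names, which is only correct because upload_chunk zero-pads the index to six digits (chunk_000002 sorts before chunk_000010, whereas an unpadded chunk_2 would sort after chunk_10). A small standalone sketch of that assembly step, using a throwaway temporary directory instead of TEMP_UPLOAD_FOLDER:

import os
import shutil
import tempfile

def assemble_chunks(temp_dir: str, final_path: str) -> None:
    """Concatenate chunk_000000, chunk_000001, ... into one file, as finalize_upload does."""
    chunk_files = sorted(f for f in os.listdir(temp_dir) if f.startswith("chunk_"))
    with open(final_path, "wb") as outfile:
        for chunk_file in chunk_files:
            with open(os.path.join(temp_dir, chunk_file), "rb") as infile:
                shutil.copyfileobj(infile, outfile)

if __name__ == "__main__":
    # Demo: 12 tiny chunks written in reverse order still reassemble
    # correctly because the zero-padded names sort numerically.
    work = tempfile.mkdtemp()
    for index in reversed(range(12)):
        with open(os.path.join(work, f"chunk_{index:06d}"), "wb") as fh:
            fh.write(bytes([index]) * 4)
    assemble_chunks(work, os.path.join(work, "assembled.bin"))
    print(os.path.getsize(os.path.join(work, "assembled.bin")))  # 48 bytes, in index order
    shutil.rmtree(work)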
config.py

@@ -194,7 +194,7 @@ class Config:
    # File Upload Configuration - Use persistent directories
    _persistent_dirs = _init_persistent_dirs.__func__()
    UPLOAD_FOLDER = get_config_value('UPLOAD_FOLDER', _persistent_dirs['uploads'])
-   MAX_CONTENT_LENGTH = int(get_config_value('MAX_CONTENT_LENGTH', str(2 * 1024 * 1024 * 1024)))  # 2GB for large ZIP files
+   MAX_CONTENT_LENGTH = int(get_config_value('MAX_CONTENT_LENGTH', str(5 * 1024 * 1024 * 1024)))  # 5GB for large ZIP files
    ALLOWED_FIXTURE_EXTENSIONS = {'csv', 'xlsx', 'xls'}
    ALLOWED_ZIP_EXTENSIONS = {'zip', '7z', 'rar'}  # Support more archive formats
@@ -206,7 +206,7 @@ class Config:
    # Large File Upload Configuration
    LARGE_FILE_THRESHOLD = int(get_config_value('LARGE_FILE_THRESHOLD', str(100 * 1024 * 1024)))  # 100MB
    STREAMING_UPLOAD_ENABLED = get_config_value('STREAMING_UPLOAD_ENABLED', 'True').lower() == 'true'
-   UPLOAD_TIMEOUT = int(get_config_value('UPLOAD_TIMEOUT', '3600'))  # 1 hour timeout for large files
+   UPLOAD_TIMEOUT = int(get_config_value('UPLOAD_TIMEOUT', '7200'))  # 2 hours timeout for large files

    # Security Configuration
    JWT_SECRET_KEY = get_config_value('JWT_SECRET_KEY', SECRET_KEY)
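Both new defaults (a 5 GB MAX_CONTENT_LENGTH, i.e. 5 * 1024**3 = 5368709120 bytes, and a 7200-second UPLOAD_TIMEOUT) still go through get_config_value, so deployments can override them. The sketch below illustrates that pattern with a stand-in helper that reads environment variables; whether the project's get_config_value actually works this way is an assumption, since its implementation is not part of this diff.

import os

def get_config_value(key: str, default: str) -> str:
    """Stand-in for the project's helper (assumed env-var lookup), for illustration only."""
    return os.environ.get(key, default)

# Hypothetical per-deployment overrides, exported before the app starts.
os.environ["MAX_CONTENT_LENGTH"] = str(10 * 1024 * 1024 * 1024)  # raise the cap to 10 GB
os.environ["UPLOAD_TIMEOUT"] = "14400"                           # allow 4 hours

# Same expressions as the new config.py defaults (5 GB, 2 hours).
MAX_CONTENT_LENGTH = int(get_config_value('MAX_CONTENT_LENGTH', str(5 * 1024 * 1024 * 1024)))
UPLOAD_TIMEOUT = int(get_config_value('UPLOAD_TIMEOUT', '7200'))

print(MAX_CONTENT_LENGTH, UPLOAD_TIMEOUT)  # 10737418240 14400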