Bsaber AI
Step 1 - Downloading Data
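The script below pages backwards through BeatSaver's /maps/latest endpoint. Each request fetches up to 100 human-made maps (automapper=false) uploaded before the current cursor timestamp, and the oldest upload time on the page becomes the cursor for the next request. Maps with more than 10 upvotes and a duration over 80 seconds are written out, one <id>.json file per map.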
import requests
import json

# Page backwards in time, starting from this upload timestamp (UTC).
# Passing it through `params` lets requests handle the URL-encoding that
# the original hard-coded string ("2024-01-28T00%3A00%3A01%2B00%3A00") did by hand.
before = "2024-01-28T00:00:01+00:00"
continuing = True

while continuing:
    url = "https://api.beatsaver.com/maps/latest"
    params = {
        "automapper": "false",  # human-made maps only
        "before": before,       # cursor: only maps uploaded before this time
        "pageSize": 100,
    }
    headers = {
        'Accept': 'application/json',
        'Accept-Charset': 'utf-8'
    }
    # headers must be passed as a keyword argument: the second positional
    # argument of requests.get() is the query string, not the headers.
    response = requests.get(url, params=params, headers=headers)
    maps = response.json()['docs']

    for curMap in maps:
        map_id = curMap['id']
        metadata = curMap['metadata']    # song info; used for the duration filter
        stats = curMap['stats']          # vote counts; used for the upvote filter
        uploadTime = curMap['uploaded']  # becomes the `before` cursor for the next page
        versions = curMap['versions']    # per-version map data we want to keep

        # Keep only maps with more than 10 upvotes that run longer than 80 seconds.
        if stats['upvotes'] > 10 and metadata['duration'] > 80:
            print(map_id, stats['upvotes'], stats['downvotes'], metadata['duration'])
            # Extract the relevant fields
            map_data = {
                "id": map_id,
                "metadata": metadata,
                "stats": stats,
                "uploaded": uploadTime,
                "versions": versions
            }
            # Write the extracted data to a JSON file named after the map id
            output_file_name = map_id + ".json"
            with open(output_file_name, 'w') as json_file:
                json.dump(map_data, json_file, indent=4)
            print("Data written to", output_file_name)

    # Stop once a page returns two maps or fewer; otherwise move the cursor
    # back to the oldest upload time seen on this page (pages are newest-first).
    continuing = len(maps) > 2
    if maps:
        before = uploadTime

print("Finished on", before)
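Once a pass finishes, a quick sanity check is to load the saved files back and print each map's download link. This is a minimal sketch rather than part of the pipeline; it assumes the .json files sit in the current working directory and that each entry in a map's versions list carries a downloadURL field, as BeatSaver API responses do.

import glob
import json

for path in glob.glob("*.json"):
    with open(path) as f:
        map_data = json.load(f)
    # Take versions[0] as the map's current published version (assumption).
    version = map_data["versions"][0]
    print(map_data["id"], map_data["metadata"]["duration"], version["downloadURL"])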
print("Finished on " , before)Step 2 - Cleaning Data