Skip to content

Commit 18c72e9

Browse files
committed
Added Scryfall API calls and DB connection (large commit).
1 parent 95f3121 commit 18c72e9

6 files changed

+229
-1
lines changed

.gitignore

+4-1
Original file line numberDiff line numberDiff line change
@@ -1 +1,4 @@
1-
.env
1+
.env
2+
json_dump/
3+
*.json
4+
__pycache__/

db_handler.py

+25
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,25 @@
1+
import os
2+
import json
3+
from dotenv import load_dotenv
4+
import mysql.connector
5+
6+
# Load DB credentials from .env so secrets stay out of source control.
load_dotenv()
DB_HOST = os.getenv("DB_HOST")
DB_USER = os.getenv("DB_USER")
DB_PW = os.getenv("DB_PW")
DB_NAME = os.getenv("DB_NAME")

# Select the schema at connect time rather than executing an f-string
# "USE {DB_NAME}" afterwards: SQL identifiers cannot be parameterized,
# so interpolating an environment value into a statement invites
# injection/quoting bugs. `database=` has the same effect safely.
mydb = mysql.connector.connect(
    host=DB_HOST,
    user=DB_USER,
    password=DB_PW,
    database=DB_NAME,
)

# Shared cursor for the module's (not yet implemented) insert helpers.
cursor = mydb.cursor()
20+
21+
def dbAdd_bulkDef():
    """Insert the default-cards bulk dump into the DB (stub, not yet implemented)."""
    return None
23+
24+
def dbAdd_bulkAll():
    """Insert the all-cards bulk dump into the DB (stub, not yet implemented)."""
    return None

download_handler.py

+39
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,39 @@
1+
import scryfall_handler
2+
import requests
3+
from tqdm import tqdm
4+
5+
# Scryfall asks API clients to identify themselves with a descriptive
# User-Agent; Accept */* covers both JSON metadata and bulk file downloads.
headers = {'User-Agent': 'ScryFetcher/0.1', 'Accept': '*/*'}
6+
7+
def default_bulk_dl():
    """Stream the Scryfall "default cards" bulk file to ./json_dump/dc_bulk.json.

    Downloads in 1 KiB chunks with a tqdm progress bar sized from the
    API-reported file size. Raises requests.HTTPError on a bad response.
    """
    dc_bulk = scryfall_handler.get_dcbulk()

    dl_uri = dc_bulk["download_uri"]
    total_size = dc_bulk["size"]
    block_size = 1024

    # stream=True keeps the multi-hundred-MB dump out of memory; the `with`
    # guarantees the connection is released even if the file write fails.
    with requests.get(dl_uri, stream=True, headers=headers) as dcdl_res:
        dcdl_res.raise_for_status()  # fail fast instead of saving an error page
        with tqdm(total=total_size, unit="B", unit_scale=True, desc="Downloading", bar_format="{l_bar}{bar:30} | {n_fmt}/{total_fmt} [{rate_fmt}] | {remaining} left") as progress_bar:
            with open("./json_dump/dc_bulk.json", "wb") as file:
                for data in dcdl_res.iter_content(block_size):
                    file.write(data)
                    # tqdm tracks the running total itself; the original's
                    # separate progress_size counter was dead code.
                    progress_bar.update(len(data))
23+
24+
def all_bulk_dl():
    """Stream the Scryfall "all cards" bulk file to ./json_dump/ac_bulk.json.

    Downloads in 1 KiB chunks with a tqdm progress bar sized from the
    API-reported file size. Raises requests.HTTPError on a bad response.
    """
    ac_bulk = scryfall_handler.get_acbulk()

    dl_uri = ac_bulk["download_uri"]
    total_size = ac_bulk["size"]
    block_size = 1024

    # stream=True keeps the multi-GB dump out of memory; the `with`
    # guarantees the connection is released even if the file write fails.
    with requests.get(dl_uri, stream=True, headers=headers) as acdl_res:
        acdl_res.raise_for_status()  # fail fast instead of saving an error page
        with tqdm(total=total_size, unit="B", unit_scale=True, desc="Downloading", bar_format="{l_bar}{bar:30}| {n_fmt}/{total_fmt} [{rate_fmt}] | {remaining} left") as progress_bar:
            with open("./json_dump/ac_bulk.json", "wb") as file:
                for data in acdl_res.iter_content(block_size):
                    file.write(data)
                    # tqdm tracks the running total itself; the original's
                    # separate progress_size counter was dead code.
                    progress_bar.update(len(data))

main.py

+45
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,45 @@
1+
import os
2+
import download_handler
3+
import scryfall_handler
4+
import db_handler
5+
6+
def error_handling(bad_input):
    """Report an unrecognized menu choice.

    The parameter is renamed from `input`, which shadowed the builtin
    input() function; callers pass it positionally so this is safe.
    The value is currently unused but kept for future error messages.
    """
    print("Something went wrong")
8+
9+
def help_menu():
    """Display the (placeholder) help text for the main menu."""
    print("This is the help menu")
11+
12+
def eval_input(num_input):
    """Dispatch one top-level menu choice.

    "0" -> download a bulk dump (default or all cards, per a y/n prompt)
           and insert it into the DB,
    "1" -> fetch the bulk index and type-catalog JSONs,
    "h" -> show the help menu.
    Anything else is routed to error_handling().
    """
    if num_input == "0":
        print("Default? y/n")
        input_default = input()
        if input_default == "y" or input_default == "Y":
            download_handler.default_bulk_dl()
            # BUG FIX: the original referenced db_handler.dbAdd_bulkDef
            # without parentheses, so the function was never called.
            db_handler.dbAdd_bulkDef()
        elif input_default == "n" or input_default == "N":
            download_handler.all_bulk_dl()
            db_handler.dbAdd_bulkAll()
        else:
            error_handling(input_default)
        return
    elif num_input == "1":
        scryfall_handler.get_bulk_json()
        scryfall_handler.get_type_jsons()
    elif num_input == "h":
        help_menu()
        return
    else:
        error_handling(num_input)
        return
34+
35+
def main():
    """Print the top-level menu, read one choice, and dispatch it."""
    print ("0: Download Bulks\n1: Get data jsons\nh: Help Menu")
    choice = input()
    eval_input(choice)

if __name__ == "__main__":
    main()

mtgjson_handler.py

Whitespace-only changes.

scryfall_handler.py

+116
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,116 @@
1+
import requests
2+
import json
3+
import time
4+
from dotenv import load_dotenv
5+
import os
6+
7+
# Pull config from .env: SF_API_BULKS is the Scryfall bulk-data index URL.
load_dotenv()
SF_API_BULKS = os.getenv('SF_API_BULKS')
# Scryfall asks API clients to send an identifying User-Agent header.
headers = {'User-Agent': 'ScryFetcher/0.1', 'Accept': '*/*'}
10+
11+
def get_dcbulk():
    """Return the Scryfall bulk-data metadata for the "default_cards" dump.

    Fetches the bulk index, locates the default-cards entry, then fetches
    and returns that entry's metadata dict (contains `download_uri`,
    `size`, etc.). Raises StopIteration if no such entry exists.
    """
    bulk_response = requests.get(SF_API_BULKS, headers=headers)
    print(f"bulk_response: {bulk_response}")
    time.sleep(0.1)  # courtesy pause for Scryfall's rate limits

    response_data = bulk_response.json().get("data")
    # Look the entry up by its `type` field rather than hard-coding
    # data[2]: the API does not guarantee the order of the index array.
    dc_uri = next(item["uri"] for item in response_data if item.get("type") == "default_cards")

    dc_response = requests.get(dc_uri, headers=headers)
    print(f"dc_response: {dc_response}")
    time.sleep(0.1)

    return dc_response.json()
27+
28+
def get_acbulk():
    """Return the Scryfall bulk-data metadata for the "all_cards" dump.

    Fetches the bulk index, locates the all-cards entry, then fetches
    and returns that entry's metadata dict (contains `download_uri`,
    `size`, etc.). Raises StopIteration if no such entry exists.
    """
    bulk_response = requests.get(SF_API_BULKS, headers=headers)
    print(f"bulk_response: {bulk_response}")
    time.sleep(0.1)  # courtesy pause for Scryfall's rate limits

    response_data = bulk_response.json().get("data")
    # Look the entry up by its `type` field rather than hard-coding
    # data[3]: the API does not guarantee the order of the index array.
    ac_uri = next(item["uri"] for item in response_data if item.get("type") == "all_cards")

    ac_response = requests.get(ac_uri, headers=headers)
    print(f"ac_response: {ac_response}")
    time.sleep(0.1)

    return ac_response.json()
44+
45+
def get_bulk_json():
    """Save the raw Scryfall bulk-data index JSON to ./json_dump/misc/api_bulk.json.

    Raises requests.HTTPError on a bad response instead of silently
    writing an error body to disk.
    """
    bulk_response = requests.get(SF_API_BULKS, headers=headers)
    print(f"bulk_response: {bulk_response}")
    time.sleep(0.1)  # courtesy pause for Scryfall's rate limits

    bulk_response.raise_for_status()  # don't persist an HTML/JSON error page
    with open("./json_dump/misc/api_bulk.json", "w", encoding="utf-8") as outfile:
        outfile.write(bulk_response.text)
52+
53+
def get_type_jsons():
    """Download every Scryfall type catalog and save each to ./json_dump/misc/.

    Each catalog endpoint URL comes from a SF_CATALOG_* variable in .env;
    the response body is written verbatim to its own JSON file. The nine
    copy-pasted request/write stanzas of the original are collapsed into
    one table-driven loop; debug print labels are preserved exactly.
    """
    # (env var, debug print label, output filename) — one row per catalog.
    catalogs = [
        ("SF_CATALOG_SUPER", "supertype_res", "super_type.json"),
        ("SF_CATALOG_CARD", "cardtype_res", "card_type.json"),
        ("SF_CATALOG_ARTIFACT", "artifacttype_res", "artifact_type.json"),
        ("SF_CATALOG_BATTLE", "battle_res", "battle_type.json"),
        # BUG FIX: the original wrote this file as misspelled
        # "creatue_type.json"; corrected to the intended spelling.
        ("SF_CATALOG_CREATURE", "creaturetype_res", "creature_type.json"),
        ("SF_CATALOG_ENCHANTMENT", "enchantmenttype_res", "enchantment_type.json"),
        ("SF_CATALOG_LAND", "landtype_res", "land_type.json"),
        ("SF_CATALOG_PLANESWALKER", "planeswalkertype_res", "planeswalker_type.json"),
        ("SF_CATALOG_SPELL", "spelltype_res", "spell_type.json"),
    ]
    for env_var, label, filename in catalogs:
        res = requests.get(os.getenv(env_var), headers=headers)
        print(f"{label}: {res}")
        time.sleep(0.1)  # courtesy pause for Scryfall's rate limits
        with open(f"./json_dump/misc/{filename}", "w", encoding="utf-8") as outfile:
            outfile.write(res.text)

0 commit comments

Comments
 (0)