Pulling all info from a new source: a new data sheet with all relevant card information.
This commit is contained in:
parent b145ccd549
commit dd3c61dc6c
@@ -25,9 +25,12 @@ cd lorcana
 
 # Usage
 
-- The only funtionality at the moment is grabbing the pngs for all the cards in the game and saving them into a local folder. You can do that just by running the main script in the root of this directory
+```bash
+python getData
+```
+- This retrieves all the card data from lorcania.com
 
 ```bash
-python main.py
+python getData
 ```
-This currently runs all the scripts in the src folder and grabs all current card images for the game.
+- This saves all the images specified in the data.csv to a local folder
Can't render this file because it is too large.
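The file the diff view cannot render is presumably the new card data sheet itself. Assuming it is the `data.csv` that `saveImages.py` reads later in this commit (the exact filename is not visible here), a quick local inspection might look like this:

```python
import pandas as pd

# Assumption: the oversized file is the card data sheet; saveImages.py in this
# commit reads it as 'data.csv', so that name is used here.
data = pd.read_csv("data.csv")

print(data.shape)           # rows (cards) x columns (fields)
print(list(data.columns))   # saveImages.py relies on card_set_id, id, name, image
print(data.head())
```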
@@ -26,20 +26,20 @@ for card_id in range(1, 449):
     data_json = json.loads(data_page)
 
     # Extract card information
-    card_info = data_json['props']['card']
+    data_draft = data_json['props']['card']
 
     # Define headers for CSV file
     if card_id == 1: # Write headers only for the first card
-        headers = list(card_info.keys())
-        with open('card_info.csv', mode='w', newline='', encoding='utf-8') as file:
+        headers = list(data_draft.keys())
+        with open('data_draft.csv', mode='w', newline='', encoding='utf-8') as file:
             writer = csv.writer(file)
             writer.writerow(headers)
 
     # Define rows for CSV file
-    rows = [list(card_info.values())]
+    rows = [list(data_draft.values())]
 
     # Append data to CSV file
-    with open('card_info.csv', mode='a', newline='', encoding='utf-8') as file:
+    with open('data_draft.csv', mode='a', newline='', encoding='utf-8') as file:
         writer = csv.writer(file)
         writer.writerows(rows)
 
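The hunk above only renames `card_info` to `data_draft` and the output file from `card_info.csv` to `data_draft.csv`; the surrounding loop (not fully shown in the diff) writes the header row once for the first card and then appends one row per card. A minimal, self-contained sketch of that pattern, with a hypothetical `fetch_card` standing in for the scraping step against lorcania.com and dummy data only illustrating the shape of `data_draft`:

```python
import csv

def fetch_card(card_id):
    # Hypothetical stand-in for the scraping code that builds data_page from
    # lorcania.com; returns the dict found at data_json['props']['card'].
    return {"id": card_id, "name": f"Card {card_id}", "image": f"https://example.invalid/{card_id}.webp"}

def dump_cards(num_cards, out_path="data_draft.csv"):
    for card_id in range(1, num_cards + 1):
        data_draft = fetch_card(card_id)

        if card_id == 1:
            # First card only: create the file and write the header row.
            with open(out_path, mode="w", newline="", encoding="utf-8") as file:
                csv.writer(file).writerow(list(data_draft.keys()))

        # Every card, including the first, appends its values as one row.
        with open(out_path, mode="a", newline="", encoding="utf-8") as file:
            csv.writer(file).writerows([list(data_draft.values())])

if __name__ == "__main__":
    dump_cards(3)
```

Reopening the file in append mode for every card works; keeping a single writer open for the whole loop would be the usual alternative for all 448 cards.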
mian.py (deleted, 48 lines)
@@ -1,48 +0,0 @@
import os
import subprocess
import sys

def check_python_installation():
    try:
        python_version = subprocess.check_output(
            ["python", "--version"], stderr=subprocess.STDOUT, universal_newlines=True
        )
        print(f"{python_version}")
        return True
    except FileNotFoundError:
        return False

def install_python():
    print("Python is not installed. Please download and install it from:")
    print("https://www.python.org/downloads/")
    input("Press Enter to continue after installing Python...")
    sys.exit(1)

def run_scripts_in_folder(folder_path):
    if not check_python_installation():
        install_python()

    # Get a list of all files in the folder
    script_files = [f for f in os.listdir(folder_path) if f.endswith('.py')]

    if not script_files:
        print(f"No Python scripts found in {folder_path}")
        return

    for script_file in script_files:
        script_path = os.path.join(folder_path, script_file)

        # Run the script using subprocess
        try:
            subprocess.run(['python', script_path], check=True)
            print(f"Script {script_file} executed successfully.")
        except subprocess.CalledProcessError as e:
            print(f"Error executing script {script_file}: {e}")

if __name__ == "__main__":
    # Specify the folder containing the scripts
    scripts_folder = "src"

    run_scripts_in_folder(scripts_folder)

    print("Done.")
saveImages.py (new file, 38 lines)
@@ -0,0 +1,38 @@
import os
import pandas as pd
import requests
import re

# Function to clean filename by removing problematic characters
def clean_filename(filename):
    return re.sub(r'[\\/*?:"<>|]', '', filename)

# Read the CSV file
data = pd.read_csv('data.csv')

# Create a folder named 'images' if it doesn't exist
images_folder = 'images'
os.makedirs(images_folder, exist_ok=True)

# Iterate through the rows
for index, row in data.iterrows():
    card_set_id = row['card_set_id']
    card_id = row['id']
    name = row['name']
    image_url = row['image']

    # Create a folder for the card_set_id if it doesn't exist
    card_set_folder = os.path.join(images_folder, str(card_set_id))
    os.makedirs(card_set_folder, exist_ok=True)

    # Get the image
    response = requests.get(image_url)
    if response.status_code == 200:
        # Clean the filename
        image_filename = clean_filename(f"{card_id} - {name}.webp")  # Adjust the extension if needed
        image_path = os.path.join(card_set_folder, image_filename)
        with open(image_path, 'wb') as f:
            f.write(response.content)
        print(f"Image saved: {image_filename}")
    else:
        print(f"Failed to download image for {name}")
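saveImages.py hardcodes a `.webp` extension and flags it with an "Adjust the extension if needed" comment. One way to avoid the hardcoded extension, not part of this commit, is to take it from the image URL and fall back to the response's `Content-Type` header; `guess_extension` below is a hypothetical helper:

```python
import mimetypes
import os
from urllib.parse import urlparse

def guess_extension(image_url, response, default=".webp"):
    # Prefer whatever extension the URL path already carries, e.g. ".../123.webp".
    ext = os.path.splitext(urlparse(image_url).path)[1]
    if ext:
        return ext
    # Otherwise map the Content-Type header (e.g. "image/png") to an extension.
    content_type = response.headers.get("Content-Type", "").split(";")[0].strip()
    return mimetypes.guess_extension(content_type) or default
```

With that helper, the filename line could become `clean_filename(f"{card_id} - {name}{guess_extension(image_url, response)}")`.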
@@ -1,25 +0,0 @@
import os
import requests

def download_images(base_url, set_name, num_images, local_folder):
    # Ensure the local folder exists
    os.makedirs(local_folder, exist_ok=True)

    for i in range(205, num_images + 1):
        image_url = f"{base_url}/{set_name}/{i}.png"  # might have to add the set tag here
        response = requests.get(image_url)

        if response.status_code == 200:
            with open(os.path.join(local_folder, f"{i}.png"), 'wb') as file:
                file.write(response.content)
            print(f"Downloaded {i}.png")
        else:
            print(f"Failed to download {i}.png - Status code: {response.status_code}")

if __name__ == "__main__":
    base_url = "https://www.lorcanawiz.com/images"  # /thefirstchapter/TFC-205.png
    set_name = "thefirstchapter"
    num_images = 216
    local_folder = f"images/{set_name}/"

    download_images(base_url, set_name, num_images, local_folder)
@@ -1,25 +0,0 @@
import os
import requests

def download_images(base_url, set_name, num_images, local_folder):
    # Ensure the local folder exists
    os.makedirs(local_folder, exist_ok=True)

    for i in range(1, num_images + 1):
        image_url = f"{base_url}/{set_name}/TFC-{i}.png"  # might have to add the set tag here
        response = requests.get(image_url)

        if response.status_code == 200:
            with open(os.path.join(local_folder, f"{i}.png"), 'wb') as file:
                file.write(response.content)
            print(f"Downloaded {i}.png")
        else:
            print(f"Failed to download {i}.png - Status code: {response.status_code}")

if __name__ == "__main__":
    base_url = "https://www.lorcanawiz.com/images"  # /thefirstchapter/TFC-205.png
    set_name = "thefirstchapter"
    num_images = 204
    local_folder = f"images/{set_name}/"

    download_images(base_url, set_name, num_images, local_folder)
@@ -1,27 +0,0 @@
import os
import requests

def download_images(base_url, set_name, num_images, local_folder):
    # Ensure the local folder exists
    os.makedirs(local_folder, exist_ok=True)

    for i in range(1, num_images + 1):
        image_url = f"{base_url}/{set_name}/{i}.png"
        response = requests.get(image_url)

        if response.status_code == 200 and 'image' in response.headers.get('Content-Type', ''):
            with open(os.path.join(local_folder, f"{i}.png"), 'wb') as file:
                file.write(response.content)
            print(f"Downloaded {i}.png")
        elif response.status_code == 404:
            print(f"Image {i}.png not found - Status code: {response.status_code}")
        else:
            print(f"Failed to download {i}.png - Status code: {response.status_code}")

if __name__ == "__main__":
    base_url = "https://www.lorcanawiz.com/images"
    set_name = "riseofthefloodborn"
    num_images = 250
    local_folder = f"images/{set_name}/"

    download_images(base_url, set_name, num_images, local_folder)
@@ -1,25 +0,0 @@
import os
import requests

def download_images(base_url, set_name, num_images, local_folder):
    # Ensure the local folder exists
    os.makedirs(local_folder, exist_ok=True)

    for i in range(1, num_images + 1):
        image_url = f"{base_url}/{set_name}/{i}.png"  # might have to add the set tag here
        response = requests.get(image_url)

        if response.status_code == 200 and 'image' in response.headers.get('Content-Type', ''):
            with open(os.path.join(local_folder, f"{i}.png"), 'wb') as file:
                file.write(response.content)
            print(f"Downloaded {i}.png")
        else:
            print(f"Failed to download {i}.png - Status code: {response.status_code}")

if __name__ == "__main__":
    base_url = "https://www.lorcanawiz.com/images"  # /thefirstchapter/205.png
    set_name = "promos"
    num_images = 24
    local_folder = f"images/{set_name}/"

    download_images(base_url, set_name, num_images, local_folder)