Syncing of TwinBru assets #16

Open
wants to merge 2 commits into main
308 changes: 188 additions & 120 deletions blender_bg_scripts/model_validation_bg_render.py

Large diffs are not rendered by default.

106 changes: 106 additions & 0 deletions blender_bg_scripts/pack_twinbru_material.py
@@ -0,0 +1,106 @@
"""
This script is used to pack a material from TwinBru to a blenderkit asset.
It imports textures from the unzipped folder , creates a node tree and assigns the textures to the material.
"""

import sys
import os
import bpy
import json

# import utils - add the parent path so blenderkit_server_utils can be imported
dir_path = os.path.dirname(os.path.realpath(__file__))
parent_path = os.path.join(dir_path, os.path.pardir)
sys.path.append(parent_path)
from blenderkit_server_utils import paths


if __name__ == "__main__":
    datafile = sys.argv[-1]
    print(f"datafile: {datafile}")
    with open(datafile, "r", encoding="utf-8") as f:
        data = json.load(f)
    twinbru_asset = data["asset_data"]
    temp_folder = data["temp_folder"]
    result_filepath = data["result_filepath"]
    print(f"temp_folder: {temp_folder}")

    # convert the name - replace underscores with spaces and drop the trailing number
    readable_name = twinbru_asset["name"].split("_")
    # capitalize the first letter of each word
    readable_name = " ".join(word.capitalize() for word in readable_name[:-1])

    # create a new material
    material = bpy.data.materials.new(name=readable_name)
    material.use_nodes = True
    material.blend_method = "BLEND"
    material.shadow_method = "HASHED"
    material.diffuse_color = (1, 1, 1, 1)
    # ensure the material is saved
    material.use_fake_user = True
    # create the node tree
    nodes = material.node_tree.nodes
    links = material.node_tree.links

    # set node spacing
    node_gap_x = 400
    node_gap_y = 300
    # find the output node
    output_node = nodes.get("Material Output")
    if not output_node:
        output_node = nodes.new(type="ShaderNodeOutputMaterial")
    output_node.location = (node_gap_x, 0)

    # find the Principled BSDF node
    principled_bsdf = nodes.get("Principled BSDF")
    if not principled_bsdf:
        principled_bsdf = nodes.new(type="ShaderNodeBsdfPrincipled")
    principled_bsdf.location = (0, 0)

    # Link the Principled BSDF to the Output Material node
    links.new(principled_bsdf.outputs[0], output_node.inputs[0])

    # Get the texture file names
    texture_directory = os.path.join(temp_folder, "pbr-pol")
    texture_files = os.listdir(texture_directory)
    mapping_substrings = {
        "BASE": "Base Color",
        "MTL": "Metallic",
        "ROUGH": "Roughness",
        "ALPHA": "Alpha",
        "NRM": "Normal",
    }
    index = 0
    texture_nodes = []
    for substring, mapping in mapping_substrings.items():
        for texture_file in texture_files:
            if substring + "." in texture_file:
                print(f"texture_file: {texture_file}")
                texture_path = os.path.join(texture_directory, texture_file)
                texture_node = nodes.new(type="ShaderNodeTexImage")
                texture_node.location = (
                    -2 * node_gap_x,
                    node_gap_y * 2 - index * node_gap_y,
                )
                texture_node.image = bpy.data.images.load(texture_path)
                # set anything besides color to non-color
                if mapping != "Base Color":
                    texture_node.image.colorspace_settings.name = "Non-Color"
                # normal maps need a normal map node
                if mapping == "Normal":
                    normal_map = nodes.new(type="ShaderNodeNormalMap")
                    normal_map.location = (
                        -node_gap_x,
                        texture_node.location[1],
                    )
                    links.new(texture_node.outputs[0], normal_map.inputs["Color"])
                    links.new(normal_map.outputs[0], principled_bsdf.inputs[mapping])
                else:
                    links.new(texture_node.outputs[0], principled_bsdf.inputs[mapping])
                index += 1
                texture_nodes.append(texture_node)

    # Pack all textures into the .blend file
    bpy.ops.file.pack_all()
    # save the material
    bpy.ops.wm.save_as_mainfile(filepath=result_filepath)
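
For reference, here is a minimal sketch of how a caller might drive this background script. The datafile keys mirror what the script reads above; the Blender binary, the temp folder contents and the repository-root working directory are assumptions for illustration only.

# Hypothetical invocation sketch - only the datafile keys are taken from
# pack_twinbru_material.py; paths and the "blender" binary are assumed.
import json
import subprocess
import tempfile

data = {
    "asset_data": {"name": "twinbru_fabric_sample_1234"},  # the last "_" token is dropped by the script
    "temp_folder": "/tmp/twinbru_unzip",  # must contain the "pbr-pol" texture folder
    "result_filepath": "/tmp/twinbru_material.blend",  # packed .blend is written here
}

with tempfile.NamedTemporaryFile("w", suffix=".json", delete=False) as f:
    json.dump(data, f)
    datafile = f.name

subprocess.run(
    [
        "blender",  # assumed to be on PATH
        "--background",
        "--factory-startup",
        "--python",
        "blender_bg_scripts/pack_twinbru_material.py",
        "--",
        datafile,  # the script reads this via sys.argv[-1]
    ],
    check=True,
)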
100 changes: 62 additions & 38 deletions blenderkit_server_utils/cloudflare_storage.py
@@ -5,8 +5,9 @@
import boto3
from botocore.exceptions import NoCredentialsError


class CloudflareStorage:
def __init__(self, access_key, secret_key, endpoint_url, region_name='auto'):
def __init__(self, access_key, secret_key, endpoint_url, region_name="auto"):
"""
Initializes the connection to Cloudflare's S3-compatible storage.

@@ -16,11 +17,13 @@ def __init__(self, access_key, secret_key, endpoint_url, region_name='auto'):
:param region_name: Region name, default is 'auto' for Cloudflare.
"""
self.session = boto3.session.Session()
self.client = self.session.client('s3',
region_name=region_name,
endpoint_url=endpoint_url,
aws_access_key_id=access_key,
aws_secret_access_key=secret_key)
self.client = self.session.client(
"s3",
region_name=region_name,
endpoint_url=endpoint_url,
aws_access_key_id=access_key,
aws_secret_access_key=secret_key,
)

def upload_file(self, file_name, bucket_name, object_name=None):
"""
@@ -52,15 +55,16 @@ def list_all_folders(self, bucket_name):
:param bucket_name: Name of the Cloudflare R2 bucket.
:return: A set of all folder prefixes.
"""
paginator = self.client.get_paginator('list_objects_v2')
paginator = self.client.get_paginator("list_objects_v2")
folders = set()

# Use a paginator to fetch all objects
for page in paginator.paginate(Bucket=bucket_name, Delimiter='/'):
for prefix in page.get('CommonPrefixes', []):
folders.add(prefix['Prefix'])
for page in paginator.paginate(Bucket=bucket_name, Delimiter="/"):
for prefix in page.get("CommonPrefixes", []):
folders.add(prefix["Prefix"])

return folders

def folder_exists(self, bucket_name, folder_name):
"""
Check if a folder exists in a specified bucket.
@@ -70,17 +74,19 @@ def folder_exists(self, bucket_name, folder_name):
:return: True if the folder exists, False otherwise.
"""
# Ensure the folder name ends with a '/' to accurately match the folder structure
if not folder_name.endswith('/'):
folder_name += '/'
if not folder_name.endswith("/"):
folder_name += "/"

response = self.client.list_objects_v2(
Bucket=bucket_name,
Prefix=folder_name,
MaxKeys=1 # We only need to find one object to confirm the folder exists
MaxKeys=1, # We only need to find one object to confirm the folder exists
)
return 'Contents' in response and len(response['Contents']) > 0
return "Contents" in response and len(response["Contents"]) > 0

def upload_folder(self, local_folder_path, bucket_name, cloudflare_folder_prefix=''):
def upload_folder(
self, local_folder_path, bucket_name, cloudflare_folder_prefix=""
):
"""
Recursively uploads a folder and its contents to Cloudflare R2, maintaining the folder structure,
and creates an index file in the top-level directory listing all uploaded files.
@@ -95,24 +101,38 @@ def upload_folder(self, local_folder_path, bucket_name, cloudflare_folder_prefix
for filename in files:
local_path = os.path.join(root, filename)
relative_path = os.path.relpath(local_path, start=local_folder_path)
cloudflare_object_name = os.path.join(cloudflare_folder_prefix, relative_path)
cloudflare_object_name = cloudflare_object_name.replace('\\', '/')
cloudflare_object_name = os.path.join(
cloudflare_folder_prefix, relative_path
)
cloudflare_object_name = cloudflare_object_name.replace("\\", "/")

# Upload the file
if self.upload_file(local_path, bucket_name, cloudflare_object_name):
uploaded_files.append(cloudflare_object_name) # Add successful uploads to the list
uploaded_files.append(
cloudflare_object_name
) # Add successful uploads to the list

# After all files are uploaded, create and upload the index.json file
index_file_path = '/tmp/index.json' if cloudflare_folder_prefix else cloudflare_folder_prefix + 'index.json'
with open(index_file_path, 'w') as index_file:
# only do this if there are files to upload
if not uploaded_files:
print("No files found to upload.")
return
index_file_path = (
"/tmp/index.json"
if cloudflare_folder_prefix
else cloudflare_folder_prefix + "index.json"
)
with open(index_file_path, "w") as index_file:
json.dump(uploaded_files, index_file)

# Upload the index file
cloudflare_object_name = os.path.join(cloudflare_folder_prefix, 'index.json')
cloudflare_object_name = cloudflare_object_name.replace('\\', '/')
cloudflare_object_name = os.path.join(cloudflare_folder_prefix, "index.json")
cloudflare_object_name = cloudflare_object_name.replace("\\", "/")
self.upload_file(index_file_path, bucket_name, cloudflare_object_name)

print(f"Uploaded index file to Cloudflare R2 storage at {cloudflare_folder_prefix}index.json")
print(
f"Uploaded index file to Cloudflare R2 storage at {cloudflare_folder_prefix}index.json"
)

def delete_folder_contents(self, bucket_name, folder_prefix):
"""
@@ -122,17 +142,19 @@ def delete_folder_contents(self, bucket_name, folder_prefix):
:param folder_prefix: The prefix of the folder to delete contents from. Must end with '/'.
"""
# Ensure the folder prefix ends with '/' to avoid accidentally deleting unintended objects
if not folder_prefix.endswith('/'):
folder_prefix += '/'
if not folder_prefix.endswith("/"):
folder_prefix += "/"

# List all objects in the folder
response = self.client.list_objects_v2(Bucket=bucket_name, Prefix=folder_prefix)
objects = response.get('Contents', [])
objects = response.get("Contents", [])

# If there are objects to delete, prepare and execute the deletion
if objects:
delete_keys = {'Objects': [{'Key': obj['Key']} for obj in objects]}
delete_response = self.client.delete_objects(Bucket=bucket_name, Delete=delete_keys)
delete_keys = {"Objects": [{"Key": obj["Key"]} for obj in objects]}
delete_response = self.client.delete_objects(
Bucket=bucket_name, Delete=delete_keys
)
print(f"Deleted objects: {delete_response}")
else:
print("No objects found to delete.")
@@ -144,26 +166,28 @@ def delete_old_files(self, bucket_name, x_days):
:param bucket_name: The name of the Cloudflare R2 bucket.
:param x_days: The age threshold in days for deleting files.
"""
paginator = self.client.get_paginator('list_objects_v2')
paginator = self.client.get_paginator("list_objects_v2")
delete_before_date = datetime.now(timezone.utc) - timedelta(days=x_days)

# Prepare a batch delete operation
delete_batch = {'Objects': []}
delete_batch = {"Objects": []}

# Iterate through all objects in the bucket
for page in paginator.paginate(Bucket=bucket_name):
for obj in page.get('Contents', []):
for obj in page.get("Contents", []):
# If the object is older than the specified days, mark it for deletion
if obj['LastModified'] < delete_before_date:
delete_batch['Objects'].append({'Key': obj['Key']})
if obj["LastModified"] < delete_before_date:
delete_batch["Objects"].append({"Key": obj["Key"]})

# Perform the deletion in batches of 1000 (S3 limit)
if len(delete_batch['Objects']) >= 1000:
self.client.delete_objects(Bucket=bucket_name, Delete=delete_batch)
delete_batch = {'Objects': []} # Reset batch
if len(delete_batch["Objects"]) >= 1000:
self.client.delete_objects(
Bucket=bucket_name, Delete=delete_batch
)
delete_batch = {"Objects": []} # Reset batch

# Delete any remaining objects in the last batch
if delete_batch['Objects']:
if delete_batch["Objects"]:
self.client.delete_objects(Bucket=bucket_name, Delete=delete_batch)

print("Old files deleted.")
print("Old files deleted.")