Merge pull request #1 from moogoo78/devel
merge
moogoo78 authored Nov 18, 2024
2 parents 8bac86b + 7910fc7 commit fd38669
Showing 8 changed files with 151 additions and 319 deletions.
97 changes: 67 additions & 30 deletions app/blueprints/admin.py
@@ -355,18 +355,23 @@ def index():
@login_required
def record_list():
site = current_user.site

collection_ids = [x.id for x in site.collections]
current_page = int(request.args.get('page', 1))
q = request.args.get('q', '')
collectors = request.args.get('collectors', '')
taxa = request.args.get('taxa', '')


#stmt = select(Unit.id, Unit.accession_number, Entity.id, Entity.field_number, Person.full_name, Person.full_name_en, Entity.collect_date, Entity.proxy_taxon_scientific_name, Entity.proxy_taxon_common_name) \
#.join(Unit, Unit.entity_id==Entity.id, isouter=True) \
#.join(Person, Entity.collector_id==Person.id, isouter=True)

stmt = make_admin_record_query(dict(request.args))

#if phase := site.data.get('phase'):
# if phase == 1:

# apply collection filter by site
stmt = stmt.filter(Record.collection_id.in_(collection_ids))

@@ -450,22 +455,62 @@ def record_list():
cat_lists= UserList.query.filter(UserList.user_id==current_user.id, UserList.entity_id==entity_id).all()

#print(r, flush=True)
item = {
'collection_id': r[11],
'accession_number': r[1] or '',
'record_id': r[2],
'field_number': r[4] or '',
'collector': collector,
'collect_date': r[5].strftime('%Y-%m-%d') if r[5] else '',
#'scientific_name': taxon_obj.full_scientific_name,
#'common_name': taxon_obj.common_name,
'taxon': taxon,
'locality': ','.join(loc_list),
'entity_id': entity_id,
'category_lists': [{'category_id': x.category_id, 'text': x.category.name} for x in cat_lists],
'mod_time': mod_time,
'image_url': image_url,
}
if phase := site.data.get('phase'):
if phase == 1:
sd = record.source_data
fields = site.data['admin']['record_list_fields']
taxon = {
'full_scientific_name': sd.get(fields['full_scientific_name'], ''),
'common_name': sd.get(fields['common_name'], ''),
}
collector = sd.get(fields['collector'])
if collector_zh := sd.get(fields['collector_zh']):
collector = f'{collector_zh} ({collector})'

loc_list = []
for i in [fields['country'], fields['county']]:
if na:= sd.get(i):
loc_list.append(na)
if na := sd.get(fields['localityc']):
loc = na
if l := sd.get(fields['locality']):
loc = f'{na} ({l})'
loc_list.append(loc)

item = {
'collection_id': r[11],
'accession_number': sd.get(fields['accession_number']),
'record_id': r[2],
'field_number': '',
'collector': collector,
'collect_date': r[5].strftime('%Y-%m-%d') if r[5] else '',
#'scientific_name': taxon_obj.full_scientific_name,
#'common_name': taxon_obj.common_name,
'taxon': taxon,
'locality': ','.join(loc_list),
'entity_id': entity_id,
'category_lists': [{'category_id': x.category_id, 'text': x.category.name} for x in cat_lists],
'mod_time': mod_time,
'image_url': image_url,
}
else:
item = {
'collection_id': r[11],
'accession_number': r[1] or '',
'record_id': r[2],
'field_number': r[4] or '',
'collector': collector,
'collect_date': r[5].strftime('%Y-%m-%d') if r[5] else '',
#'scientific_name': taxon_obj.full_scientific_name,
#'common_name': taxon_obj.common_name,
'taxon': taxon,
'locality': ','.join(loc_list),
'entity_id': entity_id,
'category_lists': [{'category_id': x.category_id, 'text': x.category.name} for x in cat_lists],
'mod_time': mod_time,
'image_url': image_url,
}

items.append(item)

plist = Person.query.filter(Person.is_collector==True).all()
@@ -704,23 +749,14 @@ def api_create_admin_record(collection_id):
resp.headers.add('Access-Control-Allow-Methods', '*')
return resp

# DEPRECATE
@admin.route('/api/units/<int:item_id>', methods=['DELETE'])
def api_unit_delete(item_id):
return jsonify({'message': 'ok',})

# DEPRECATE
@admin.route('/api/identificatios/<int:item_id>', methods=['DELETE'])
def api_identification_delete(item_id):
return jsonify({'message': 'ok', 'next_url': url_for('admin.')})


@admin.route('/api/units/<int:unit_id>/media/<int:media_id>', methods=['DELETE'])
def api_delete_unit_media(unit_id, media_id):
if mo := session.get(MultimediaObject, media_id):
serv_key = current_app.config['SERVICE_KEY']
site = get_current_site(request)
res = delete_image(site, serv_key, mo.file_url)
serv_keys = site.get_service_keys()
upload_conf = site.data['admin']['uploads']
res = delete_image(upload_conf, serv_keys, mo.file_url)
mo.unit.cover_image_id = None
session.delete(mo)
session.commit()
@@ -741,8 +777,9 @@ def api_post_unit_media(unit_id):

if f := request.files['file']:
site = get_current_site(request)
serv_key = current_app.config['SERVICE_KEY']
res = upload_image(site, serv_key, f, f'u{unit.id}')
serv_keys = site.get_service_keys()
upload_conf = site.data['admin']['uploads']
res = upload_image(upload_conf, serv_keys, f, f'u{unit.id}')
if res['error'] == '' and res['message'] == 'ok':
sd = {'originalFilename': f.filename}
if exif := res.get('exif'):
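In the record_list change above, sites with phase == 1 render the listing straight from each record's source_data, using a per-site field mapping stored at site.data['admin']['record_list_fields']. A minimal sketch of what that mapping might look like — only the keys are dictated by the view code above; the source-column names on the right are placeholders, not taken from this repository:

# Hypothetical phase-1 field mapping (values are illustrative only).
record_list_fields = {
    'accession_number':     'catalogNumber',
    'full_scientific_name': 'scientificName',
    'common_name':          'vernacularName',
    'collector':            'recordedBy',
    'collector_zh':         'recordedBy_zh',
    'country':              'country',
    'county':               'county',
    'localityc':            'locality_zh',   # local-language locality, shown first
    'locality':             'locality',      # appended in parentheses when present
}

# With source_data like {'scientificName': 'Begonia formosana', 'recordedBy': 'A. Collector', ...}
# the item dict gets its taxon, collector and locality directly from the imported row,
# before any Unit/Person/Taxon records exist for it.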
8 changes: 5 additions & 3 deletions app/commands.py
@@ -163,13 +163,15 @@ def compilemessages():
@flask_app.cli.command('import')
@click.argument('csv_file')
@click.argument('collection_id')
def import_record(csv_file, collection_id):

def import_record(csv_file, collection_id, record_group_id):
# NOQA: record_group_id
# TODO: auto add record_group
import csv
from app.helpers_data import import_phase0

with open(csv_file, newline='') as csvfile:
spamreader = csv.DictReader(csvfile)
counter = 0
for row in spamreader:
import_phase0(row, int(collection_id))
#TODO: trunc each row
import_phase0(row, int(collection_id), record_group_id)
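Note that import_record now takes a record_group_id parameter and forwards it to import_phase0, while only csv_file and collection_id are registered as click arguments. A hedged sketch of the presumably intended wiring and invocation (not part of this commit):

@flask_app.cli.command('import')
@click.argument('csv_file')
@click.argument('collection_id')
@click.argument('record_group_id')
def import_record(csv_file, collection_id, record_group_id):
    ...

# invoked as, for example:
#   flask import specimens.csv 1 3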
7 changes: 6 additions & 1 deletion app/helpers_data.py
@@ -197,11 +197,16 @@ def export_specimen_dwc_csv():
print(t3-t2, t2-t1, flush=True)


def import_phase0(data, collection_id):
def import_phase0(data, collection_id, record_group_id):
r = Record(source_data=data, collection_id=collection_id)
session.add(r)
session.commit()

m = RecordGroupMap(record_id=r.id, group_id=record_group_id)
session.add(m)
session.commit()

u = Unit(collection_id=collection_id, record_id=r.id)
session.add(u)
session.commit()
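For each imported row, import_phase0 now creates three rows: a Record holding the raw CSV columns in source_data, a RecordGroupMap linking it to the given record group, and an empty Unit in the same collection. A minimal usage sketch with illustrative values:

# One csv.DictReader row; column names here are placeholders.
row = {'catalogNumber': 'A-000123', 'scientificName': 'Begonia formosana'}
import_phase0(row, collection_id=1, record_group_id=2)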

57 changes: 26 additions & 31 deletions app/helpers_image.py
@@ -31,31 +31,30 @@
('o', (4096, 4096)),
)

def delete_image(site, service_key, file_url):

def delete_image(upload_conf, service_keys, file_url):
ret = {
'message': 'ok',
'error': '',
}

uploads = site.data['admin']['uploads']
keys = decode_key(service_key)

if uploads['storage'] == 'aws':
if upload_conf['storage'] == 'aws':
s3_client = boto3.client(
's3',
aws_access_key_id=keys[site.name]['accessKeyID'],
aws_secret_access_key=keys[site.name]['secretAccessKey'],
region_name=uploads['region'],
aws_access_key_id=service_keys['accessKeyID'],
aws_secret_access_key=service_keys['secretAccessKey'],
region_name=upload_conf['region'],
)
file_prefix = f"https://{uploads['bucket']}.s3.{uploads['region']}.amazonaws.com/{uploads['prefix']}/"

file_prefix = f"https://{upload_conf['bucket']}.s3.{upload_conf['region']}.amazonaws.com/{upload_conf['prefix']}/"

filename = file_url.replace(file_prefix, '')
for thumb in THUMB_MAP:
k = filename.replace('-m.jpg', f'-{thumb[0]}.jpg')
object_key = f"{uploads['prefix']}/{k}"
object_key = f"{upload_conf['prefix']}/{k}"
# print(object_key, flush=True)
response = s3_client.delete_object(
Bucket=uploads['bucket'],
Bucket=upload_conf['bucket'],
Key=object_key,
)

@@ -93,13 +92,13 @@ def sanity(text):
return exif


def upload_image(site, service_key, file_, item_id):
def upload_image(upload_conf, service_keys, file_, item_id):
# default upload to cloud storage

ret = {
'message': 'ok',
'error': '',
'exif': {}
'exif': {},
}
# save to uploads
#filename = secure_filename(file_.filename)
@@ -112,35 +111,28 @@ def upload_image(site, service_key, file_, item_id):
with NamedTemporaryFile() as temp:
temp.write(file_.read())

uploads = site.data['admin']['uploads']
keys = decode_key(service_key)
#print(uploads, decode_key(service_key), flush=True)
if uploads['storage'] == 'aws':
if upload_conf['storage'] == 'aws':
s3_client = boto3.client(
's3',
aws_access_key_id=keys[site.name]['accessKeyID'],
aws_secret_access_key=keys[site.name]['secretAccessKey'],
region_name=uploads['region'],
aws_access_key_id=service_keys['accessKeyID'],
aws_secret_access_key=service_keys['secretAccessKey'],
region_name=upload_conf['region'],
)

h = gen_time_hash()
ret['file_url'] = f"https://{uploads['bucket']}.s3.{uploads['region']}.amazonaws.com/{uploads['prefix']}/{item_id}-{h}-m.jpg"
ret['file_url'] = f"https://{upload_conf['bucket']}.s3.{upload_conf['region']}.amazonaws.com/{upload_conf['prefix']}/{item_id}-{h}-m.jpg"

one_exif = {}
# make thumb
for thumb in THUMB_MAP:
#stem = Path(filename).stem
target_filename = f'{item_id}-{h}-{thumb[0]}.jpg'

#Path(current_app.config['UPLOAD_FOLDER'], filename)
#target_path = thumb_source_path.joinpath(Path(target_filename))
#print (source_path, target_path)
#target_path = Path(current_app.config['UPLOAD_FOLDER'], target_filename)
#file.save(Path(current_app.config['UPLOAD_FOLDER'], filename))

object_key = target_filename
if pref := uploads['prefix']:
object_key = f'{pref}/{target_filename}'
object_key = f'{item_id}-{h}-{thumb[0]}.jpg'
if pref := upload_conf['prefix']:
object_key = f'{pref}/{object_key}'

img = Image.open(temp.name)
if len(one_exif) == 0:
@@ -155,14 +147,17 @@
img.save(in_memory_file, 'JPEG')
in_memory_file.seek(0)

if uploads['storage'] == 'aws':
if upload_conf['storage'] == 'aws':
r = s3_client.upload_fileobj(
in_memory_file,
uploads['bucket'],
upload_conf['bucket'],
object_key,
ExtraArgs={'ACL': 'public-read'}
)
current_app.logger.debug(f'upload to {object_key}')
try:
current_app.logger.debug(f'upload to {object_key}')
except:
print(f'upload to {object_key}')

# except ClientError as e:
# #logging.error(e)
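After this refactor, delete_image and upload_image no longer receive the site object or the raw SERVICE_KEY; callers pass the uploads config and the already-decoded per-site credentials. A sketch of the shapes these two arguments need, inferred from the keys read in this module (all values are placeholders):

upload_conf = {
    'storage': 'aws',              # only the 'aws' branch is handled in this module
    'bucket': 'example-bucket',
    'region': 'ap-northeast-1',
    'prefix': 'media',             # may be empty; prepended to object keys when set
}
service_keys = {
    'accessKeyID': '...',
    'secretAccessKey': '...',
}

# upload_image(upload_conf, service_keys, file_, 'u123') returns a dict with
# 'message', 'error', 'exif' and, on success, 'file_url' pointing at the -m.jpg size;
# the other THUMB_MAP sizes are uploaded alongside it under the same prefix.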
10 changes: 9 additions & 1 deletion app/models/site.py
@@ -1,3 +1,6 @@
from flask import (
current_app,
)
from sqlalchemy import (
select,
Table,
@@ -33,7 +36,9 @@
session,
TimestampMixin,
)

from app.utils import (
decode_key,
)
# organization_collection = Table(
# 'organization_collection',
# Base.metadata,
@@ -145,6 +150,9 @@ def get_type_specimens(self):
cids = [x.id for x in self.collections]
return get_or_set_type_specimens(cids)

def get_service_keys(self):
return decode_key(current_app.config['SERVICE_KEY'])[self.name]


class Organization(Base, TimestampMixin):
'''
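Site.get_service_keys() centralizes the SERVICE_KEY decoding that the admin views used to do inline. The decoded blob is assumed to be keyed by site name, each entry holding that site's AWS credentials (this shape is inferred from the old call sites such as keys[site.name]['accessKeyID']). The call pattern the admin views use after this change, as shown in the admin.py hunks above:

site = get_current_site(request)
serv_keys = site.get_service_keys()          # {'accessKeyID': ..., 'secretAccessKey': ...}
upload_conf = site.data['admin']['uploads']  # storage / bucket / region / prefix settings
res = upload_image(upload_conf, serv_keys, f, f'u{unit.id}')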
2 changes: 1 addition & 1 deletion compose.prod.yml
@@ -22,7 +22,7 @@ services:
traefik:
image: traefik:3.2
command:
- "--api.insecure=true"
#- "--api.insecure=true"
- "--providers.docker=true"
- "--entrypoints.web.address=:80"
- "--entrypoints.websecure.address=:443"