#!/usr/bin/env python3
import argparse
import base64
import logging
import os
import platform
import shutil
import stat
import subprocess
import sys
import urllib.request
from collections.abc import Generator
from pathlib import Path
from tempfile import NamedTemporaryFile
from typing import Any, Callable, Literal, Optional
from zipfile import ZipFile
from setuptools_scm import get_version
from packaging import version
rotki_version = get_version()
pyinstaller_version = os.environ.get('PYINSTALLER_VERSION', '5.7.0')
BACKEND_PREFIX = 'rotki-core'
SUPPORTED_ARCHS = [
'AMD64', # Windows
'x86_64',
'aarch64',
'arm64',
]
SUPPORTED_OSES = [
'Darwin',
'Windows',
'Linux',
]
logging.basicConfig(
level=logging.INFO,
format='%(asctime)s - %(name)s - %(levelname)s: %(message)s',
)
logger = logging.getLogger('package')
MAC_CERTIFICATE = 'CERTIFICATE_OSX_APPLICATION'
WIN_CERTIFICATE = 'CERTIFICATE_WIN_APPLICATION'
CERTIFICATE_KEY = 'CSC_KEY_PASSWORD'
APPLE_ID = 'APPLEID'
APPLE_ID_PASS = 'APPLEIDPASS'
X64_APPL_RUST_TARGET = 'x86_64-apple-darwin'
ARM_APPL_RUST_TARGET = 'aarch64-apple-darwin'
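# Decorator factory that groups a function's log output: emits GitHub Actions
# ::group::/::endgroup:: markers when running on CI and plain separator lines locally.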
def log_group(name: str) -> Callable:
def start_group(group_name: str) -> None:
if os.environ.get('CI'):
subprocess.call(f'echo ::group::"{group_name}"', shell=True)
else:
logger.info(f'\n\n-----{group_name}-----\n\n')
def end_group() -> None:
if os.environ.get('CI'):
subprocess.call('echo ::endgroup::', shell=True)
else:
logger.info('\n\n-----------------\n\n')
def decorate(fn: Callable) -> Callable:
def wrapper(*args: Any, **kwargs: Optional[Any]) -> None:
start_group(name)
fn(*args, **kwargs)
end_group()
return wrapper
return decorate
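# Gathers build configuration (architecture, OS, version) and captures the signing
# secrets before removing them from os.environ; they are re-exposed only through the
# *_sign_env()/*_sign_vars() helpers.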
class Environment:
def __init__(self) -> None:
self.arch = platform.machine()
self.os = platform.system()
self.target_arch = os.environ.get('MACOS_BUILD_ARCH', self.arch)
if self.is_mac():
os.environ.setdefault('ONEFILE', '0')
self.rotki_version = rotki_version
if os.environ.get('ROTKI_VERSION') is None:
os.environ.setdefault('ROTKI_VERSION', self.rotki_version)
self.__certificate_mac = os.environ.get(MAC_CERTIFICATE)
self.__certificate_win = os.environ.get(WIN_CERTIFICATE)
self.__csc_password = os.environ.get(CERTIFICATE_KEY)
self.__appleid = os.environ.get(APPLE_ID)
self.__appleidpass = os.environ.get(APPLE_ID_PASS)
os.environ.pop(MAC_CERTIFICATE, None)
os.environ.pop(WIN_CERTIFICATE, None)
os.environ.pop(CERTIFICATE_KEY, None)
os.environ.pop(APPLE_ID, None)
os.environ.pop(APPLE_ID_PASS, None)
def macos_sign_env(self) -> dict[str, str]:
env = os.environ.copy()
if self.__csc_password is not None:
env.setdefault(CERTIFICATE_KEY, self.__csc_password)
if self.__appleid is not None:
env.setdefault(APPLE_ID, self.__appleid)
if self.__appleidpass is not None:
env.setdefault(APPLE_ID_PASS, self.__appleidpass)
return env
def macos_sign_vars(self) -> dict[str, Optional[str]]:
return {
'certificate': self.__certificate_mac,
'key': self.__csc_password,
'appleid': self.__appleid,
'appleidpass': self.__appleidpass,
}
def win_sign_env(self) -> dict[str, str]:
env = os.environ.copy()
if self.__csc_password is not None:
env.setdefault(CERTIFICATE_KEY, self.__csc_password)
return env
def win_sign_vars(self) -> dict[str, Optional[str]]:
return {
'certificate': self.__certificate_win,
'key': self.__csc_password,
}
@staticmethod
def sanity_check() -> None:
"""
Sanity check that exits if any of the secret environment variables is set when called.
"""
mac_cert = os.environ.get(MAC_CERTIFICATE)
win_cert = os.environ.get(WIN_CERTIFICATE)
key_pass = os.environ.get(CERTIFICATE_KEY)
appleid = os.environ.get(APPLE_ID)
appleidpass = os.environ.get(APPLE_ID_PASS)
sign_vars = [mac_cert, win_cert, key_pass, appleid, appleidpass]
if any(sign_vars):
logger.error('at least one of the secrets was set in the environment')
sys.exit(1)
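# On master, fails the build if the bugfixes branch has commits that were not merged for the release.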
@staticmethod
def check_repo() -> None:
output = subprocess.check_output(
'git branch --show-current',
encoding='utf8',
shell=True,
).strip()
if output != 'master':
logger.info(f'Current branch is {output}, skipping check')
return
unmerged_commits = subprocess.check_output(
'git rev-list HEAD..bugfixes --no-merges | wc -l | xargs echo -n',
encoding='utf8',
shell=True,
).strip()
if unmerged_commits != '0':
logger.error(
f'Found {unmerged_commits} commits in bugfixes that have not been merged for release',
)
sys.exit(1)
logger.info('branch is up to date with bugfixes')
def check_environment(self) -> None:
if self.arch not in SUPPORTED_ARCHS:
logger.error(
f'{self.arch} is not supported, packaging only supports {SUPPORTED_ARCHS}',
)
sys.exit(1)
if self.os not in SUPPORTED_OSES:
logger.error(
f'{platform.system()} is not supported, packaging only supports {SUPPORTED_OSES}',
)
sys.exit(1)
if not self.is_ci() and not os.environ.get('VIRTUAL_ENV'):
logger.error('The script should not run outside a virtual environment if not on CI')
sys.exit(1)
self.check_repo()
def is_mac(self) -> bool:
return self.os == 'Darwin'
def is_linux(self) -> bool:
return self.os == 'Linux'
def is_windows(self) -> bool:
return self.os == 'Windows'
def is_mac_runner(self) -> bool:
return self.is_mac() and self.is_ci()
def is_universal2(self) -> bool:
return self.is_mac_runner() or os.environ.get('MACOS_BUILD_ARCH') == 'universal2'
def is_x86_64(self) -> bool:
return self.arch in ['x86_64', 'AMD64']
def backend_suffix(self) -> str:
"""
Provides the OS-specific backend binary suffix.
On Linux and macOS the executables have no extension, but on Windows they have
the .exe extension. Since the suffix is used to match filenames, on Windows it
also includes the extension so that callers do not need to check for Windows
again and append the extension themselves.
:returns: The backend's OS-specific filename suffix.
"""
if self.is_mac():
return 'macos'
if self.is_linux():
return 'linux'
if self.is_windows():
return 'windows.exe'
raise ValueError(f'Invalid os {self.os}')
@staticmethod
def mac_electron_env_set() -> bool:
return os.environ.get('MACOS_ELECTRON_ARCH') is not None
def get_frontend_env(self) -> dict[str, str]:
if os.environ.get('GH_TOKEN'):
logger.info('GH_TOKEN WAS SET')
else:
logger.info('NO GH_TOKEN SET')
env = os.environ.copy()
if self.is_mac() and not self.mac_electron_env_set() and not self.is_ci():
arch = 'x64' if self.is_x86_64() else 'arm64'
env.setdefault('MACOS_ELECTRON_ARCH', arch)
return env
@staticmethod
def is_ci() -> bool:
return os.environ.get('CI') is not None
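# Generates a .sha512 checksum file next to a given artifact using the platform-appropriate
# tool (shasum, sha512sum or PowerShell Get-FileHash).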
class Checksum:
@staticmethod
def generate(env: Environment, path: Path) -> Path:
checksum_filename = f'{path.name}.sha512'
cmd = None
if env.is_mac():
cmd = f'shasum -a 512 {path.name} > {checksum_filename}'
elif env.is_linux():
cmd = f'sha512sum {path.name} > {checksum_filename}'
elif env.is_windows():
cmd = f'powershell.exe -command "Get-FileHash {path.name} -Algorithm SHA512 | Select-Object Hash | foreach {{$_.Hash}} | Out-File -FilePath {checksum_filename}"' # noqa: E501
else:
logger.error('unsupported system')
sys.exit(1)
ret_code = subprocess.call(cmd, cwd=path.parent, shell=True)
if ret_code != 0:
logger.error(f'could not generate sha512 sum for {path}')
sys.exit(1)
return path.parent / checksum_filename
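# Encapsulates the directory layout used during packaging (dist/, build/, build/wheels,
# build/temp, build/backend, build/colibri) plus helpers to move or copy artifacts into dist/.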
class Storage:
def __init__(self) -> None:
self.working_directory = Path.cwd()
self.dist_directory = self.working_directory / 'dist'
self.build_directory = self.working_directory / 'build'
self.wheel_directory = self.build_directory / 'wheels'
self.temporary_directory = self.build_directory / 'temp'
self.backend_directory = self.build_directory / 'backend'
self.colibri_directory = self.build_directory / 'colibri'
self.build_directory.mkdir(parents=True, exist_ok=True)
def prepare_backend(self) -> None:
if self.backend_directory.exists():
shutil.rmtree(self.backend_directory)
self.backend_directory.mkdir(parents=True, exist_ok=True)
def prepare_temp(self) -> None:
self.temporary_directory.mkdir(exist_ok=True, parents=True)
def move_to_dist(self, file: Path) -> None:
self.dist_directory.mkdir(exist_ok=True)
logger.info(f'moving {file.name} to {self.dist_directory}')
shutil.move(src=file, dst=self.dist_directory)
def check_backend(self) -> None:
backend = self.backend_directory
if not backend.exists() or not any(backend.iterdir()):
logger.error(f'{backend} was missing or empty')
sys.exit(1)
def copy_to_dist(self, src: Path, sub_dir: Optional[str] = None) -> None:
self.dist_directory.mkdir(exist_ok=True)
dst = self.dist_directory
if sub_dir is not None:
dst = dst / sub_dir
logger.info(f'copying {src.name} to {dst}')
if src.is_dir():
shutil.copytree(src, dst)
else:
shutil.copy(src=src, dst=dst)
def clean(self) -> None:
shutil.rmtree(self.build_directory)
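# Windows-specific packaging helpers: miniupnpc DLL installation and signing
# certificate handling for the installer.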
class WindowsPackaging:
def __init__(self, storage: Storage, env: Environment) -> None:
self.__storage = storage
self.__env = env
self.__p12 = ''
@log_group('miniupnpc windows')
def setup_miniupnpc(self) -> None:
"""
Downloads miniupnpc and extracts the DLL into the virtual environment.
"""
miniupnc = 'miniupnpc_64bit_py39-2.2.24.zip'
python_dir = Path(
subprocess.check_output(
'python -c "import os, sys; print(os.path.dirname(sys.executable))"',
encoding='utf8',
shell=True,
).strip(),
)
if python_dir.name != 'Scripts':
python_dir = python_dir / 'Scripts'
dll_filename = 'miniupnpc.dll'
dll_path = python_dir / dll_filename
if dll_path.exists():
logger.info(f'miniupnpc.dll is already installed in {python_dir}')
return
build_dir = self.__storage.build_directory
os.chdir(build_dir)
zip_path = build_dir / miniupnc
extraction_dir = build_dir / 'miniupnpc'
extraction_dir.mkdir(exist_ok=True)
url = f'https://github.com/mrx23dot/miniupnp/releases/download/miniupnpd_2_2_24/{miniupnc}'
urllib.request.urlretrieve(url, zip_path) # noqa: S310
with ZipFile(zip_path, 'r') as zip_ref:
zip_ref.extractall(extraction_dir)
dll_file = extraction_dir / dll_filename
logger.info(f'moving {dll_file} to {python_dir}')
shutil.move(
src=dll_file,
dst=python_dir,
)
zip_path.unlink(missing_ok=True)
shutil.rmtree(extraction_dir)
@log_group('certificates')
def import_signing_certificates(self) -> bool:
"""
Imports the signing certificates from the environment variables
and prepares for signing.
The function will bail (exit 1) when the certificate is set but
no key has been passed in the configuration.
:return: True when the certificate and key are properly setup,
False when the certificate is not configured.
"""
sign_vars = self.__env.win_sign_vars()
certificate = sign_vars.get('certificate')
csc_password = sign_vars.get('key')
if os.environ.get('WIN_CSC_LINK') is not None and csc_password is not None:
logger.info('WIN_CSC_LINK already set, skipping')
return True
if certificate is None:
logger.info(f'{WIN_CERTIFICATE} is not set, skipping signing')
return False
if csc_password is None:
logger.error(f'Missing {CERTIFICATE_KEY}')
sys.exit(1)
logger.info('preparing to sign windows installer')
with NamedTemporaryFile(delete=False, suffix='.p12') as p12:
self.__p12 = p12.name
os.environ.setdefault('WIN_CSC_LINK', self.__p12)
certificate_data = base64.b64decode(certificate)
p12.write(certificate_data)
return True
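# Removes the temporary .p12 certificate created for signing, if any.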
def cleanup_certificate(self) -> None:
Path(self.__p12).unlink(missing_ok=True)
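# macOS-specific packaging helpers: universal2 wheel preparation, keychain/code signing
# and zipping of the backend.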
class MacPackaging:
def __init__(self, storage: Storage, environment: Environment) -> None:
self.__storage = storage
self.__environment = environment
self.__default_keychain: Optional[str] = None
self.__keychain = 'rotki-build.keychain'
self.__p12 = '/tmp/certificate.p12' # noqa: S108 # ask Kelsos if this can change
@staticmethod
def unpack_wheels(
package_version: str,
plt: Literal['macosx_10_9_x86_64', 'macosx_11_0_arm64'],
directory: Path,
) -> None:
logger.info(f'preparing to download {package_version} wheel for {plt}')
directory.mkdir(exist_ok=True)
os.chdir(directory)
subprocess.call(
f'pip download {package_version} --platform {plt} --only-binary=:all:',
shell=True,
)
pkg_arch = 'x86_64' if plt.find('x86_64') >= 0 else 'arm64'
for file in directory.iterdir():
logger.info(f'checking if {file} package is {pkg_arch}')
if file.name.find(pkg_arch) >= 0:
logger.info(f'unpacking wheel {file}')
subprocess.call(f'wheel unpack {file}', shell=True)
file.unlink()
@staticmethod
def macos_link_archs(source: Path, destination: Path) -> None:
"""
Uses lipo to create a dual-architecture library for macOS by merging an x86_64
and an arm64 library.
The order of the two does not matter, but keep in mind that destination will
become the dual-architecture one.
:param source: One of the two libraries that will be merged with lipo.
:param destination: The library that will become the dual architecture one.
"""
logger.info(f'creating fat binary {source} <-> {destination}')
ret_code = subprocess.call(
f'lipo -create -output {destination} {source} {destination}',
shell=True,
)
if ret_code != 0:
logger.error(f'failed to create a fat binary {source} {destination}')
sys.exit(1)
archs = subprocess.check_output(f'lipo -archs {destination}', encoding='utf-8', shell=True)
if archs.strip() != 'x86_64 arm64':
logger.error(f'{destination} was not a fat binary, only has {archs}')
sys.exit(1)
@staticmethod
def modify_wheel_metadata(wheel_metadata: Path) -> None:
"""
Modifies the tag in the wheel metadata file from x86_64 to universal2 so
that the repackaged wheel has the proper tag.
:param wheel_metadata: Path to the wheel metadata file
"""
with open(wheel_metadata) as file:
data = file.readlines()
for (index, line) in enumerate(data):
if not line.startswith('Tag'):
continue
data[index] = line.replace('x86_64', 'universal2')
with open(wheel_metadata, 'w') as file:
file.writelines(data)
def __download_patched_pip(self) -> Path:
"""
Downloads the patched pip version needed to create a universal2 virtual environment.
:return: The path to the downloaded pip wheel.
"""
pip_wheel = 'pip-22.1.2-py3-none-any.whl'
temporary_directory = self.__storage.temporary_directory
temporary_directory.mkdir(exist_ok=True)
wheel_file = temporary_directory / pip_wheel
urllib.request.urlretrieve( # noqa: S310
url=f'https://github.com/rotki/rotki-build/raw/main/{pip_wheel}',
filename=wheel_file,
)
return Path(wheel_file)
def __get_versions(self, packages: list[str]) -> dict[str, str]:
"""
Gets the versions of specified packages from requirements.txt
:param packages: A list of package names for which we need versions from
the requirements.txt
:returns: A Dict where the key is the package and the value is the package version
"""
requirements = self.__storage.working_directory / 'requirements.txt'
package_versions: dict[str, str] = {}
with open(requirements) as fp:
while True:
line = fp.readline()
if not line:
break
if len(line.strip()) == 0 or line.startswith('#'):
continue
requirement = line.split('#')[0]
req = requirement.split(';')
requirement = req[0]
if len(req) > 1 and req[1].strip() == "sys_platform == 'win32'":
continue
split_requirement = requirement.split('==')
package_name = split_requirement[0]
if package_name in packages:
package_version = split_requirement[1]
package_versions[package_name.strip()] = package_version.strip()
return package_versions
@log_group('universal2 wheel')
def __universal_repackage(self, package_name: str) -> None:
"""
Creates universal2 wheels for packages.
coincurve and cffi only provide architecture specific wheels (x86_64, arm64)
for macOS. To create a universal2 wheel we download the architecture
specific wheels, unpack them, then merge any native extension (*.so) using lipo.
Next we modify the tag so that the repacked wheel is properly tagged as universal2,
and finally we pack the wheel again.
"""
storage = self.__storage
logger.info(f'Preparing to merge {package_name} wheels')
versions = self.__get_versions(packages=[package_name])
build_directory = storage.build_directory
if not build_directory.exists():
build_directory.mkdir(parents=True)
temp = build_directory / 'temp'
temp.mkdir(parents=True, exist_ok=True)
wheels_directory = build_directory / 'wheels'
wheels_directory.mkdir(parents=True, exist_ok=True)
x86_64 = temp / 'x86_64'
arm64 = temp / 'arm64'
package = f'{package_name}=={versions.get(package_name)}'
self.unpack_wheels(package, 'macosx_10_9_x86_64', x86_64)
self.unpack_wheels(package, 'macosx_11_0_arm64', arm64)
for unpacked_wheel in x86_64.iterdir():
so_libs = unpacked_wheel.glob('**/*.so')
for so_lib in so_libs:
arm64_solib = next(arm64.glob(f'**/{so_lib.name}'))
self.macos_link_archs(destination=so_lib, source=arm64_solib)
metadata = next(unpacked_wheel.glob('**/WHEEL'))
logger.info(f'preparing to modify metadata: {metadata}')
self.modify_wheel_metadata(metadata)
ret_code = subprocess.call(
f'wheel pack {unpacked_wheel} -d {wheels_directory}',
shell=True,
)
if ret_code != 0:
logger.error(f'repack of {unpacked_wheel} failed')
sys.exit(1)
shutil.rmtree(x86_64)
shutil.rmtree(arm64)
@log_group('miniupnpc universal2 wheel')
def __build_miniupnpc_universal(self) -> None:
"""
Builds a universal2 wheel for miniupnpc.
Miniupnpc builds the native library libminiupnpc.a and then statically links
the native extension against that.
Unfortunately it is not possible to pass dual architecture flags to the compiler
and build a universal wheel in one step. Instead, we download the package source
and build the static library once for each architecture.
Then we use lipo to merge the two static libraries to one dual arch library
which is then used when we create the universal2 wheel.
"""
logger.info('Preparing to create universal2 wheels for miniupnpc')
self.__storage.prepare_temp()
temp = self.__storage.temporary_directory
build_directory = self.__storage.build_directory
package_name = 'miniupnpc'
versions = self.__get_versions([package_name])
libminiupnpc_dylib = 'libminiupnpc.dylib'
libminiupnpc_a = 'libminiupnpc.a'
miniupnpc_version = versions.get(package_name)
miniupnpc = f'{package_name}-{miniupnpc_version}'
miniupnpc_archive = f'{miniupnpc}.tar.gz'
miniupnpc_directory = build_directory / miniupnpc
os.chdir(build_directory)
download_result = subprocess.call(
f'pip download {package_name}=={miniupnpc_version}',
shell=True,
)
if download_result != 0:
logger.error(f'failed to download {package_name}')
sys.exit(1)
extract_result = subprocess.call(f'tar -xvf {miniupnpc_archive}', shell=True)
if extract_result != 0:
logger.error(f'failed to extract {package_name}')
sys.exit(1)
os.chdir(miniupnpc_directory)
env = os.environ.copy()
env.setdefault('CC', 'gcc -arch x86_64')
env.setdefault('MACOSX_DEPLOYMENT_TARGET', '10.9')
make_x86_result = subprocess.call('make', env=env, shell=True)
if make_x86_result != 0:
logger.error('make failed for x86_64')
sys.exit(1)
shutil.move(Path(libminiupnpc_dylib), temp)
shutil.move(Path(libminiupnpc_a), temp)
make_clean_result = subprocess.call('make clean', shell=True)
if make_clean_result != 0:
logger.error(f'failed to clean {package_name}')
sys.exit(1)
env = os.environ.copy()
env.setdefault('CC', 'gcc -arch arm64')
env.setdefault('MACOSX_DEPLOYMENT_TARGET', '11.0')
make_arm64_result = subprocess.call('make', env=env, shell=True)
if make_arm64_result != 0:
logger.error('make failed for arm64')
sys.exit(1)
self.macos_link_archs(
destination=Path(libminiupnpc_a),
source=temp / libminiupnpc_a,
)
self.macos_link_archs(
destination=Path(libminiupnpc_dylib),
source=temp / libminiupnpc_dylib,
)
wheel_build_result = subprocess.call('python setup.py bdist_wheel', shell=True)
if wheel_build_result != 0:
logger.error(f'failed to build {package_name} wheel')
sys.exit(1)
miniupnpc_dist = miniupnpc_directory / 'dist'
wheel_file = miniupnpc_dist / f'{miniupnpc}-cp39-cp39-macosx_10_9_universal2.whl'
wheel_directory = self.__storage.wheel_directory
wheel_directory.mkdir(exist_ok=True)
shutil.move(wheel_file, wheel_directory)
shutil.rmtree(temp)
archive_path = build_directory / miniupnpc_archive
if archive_path.exists():
archive_path.unlink()
if miniupnpc_directory.exists():
shutil.rmtree(miniupnpc_directory)
def prepare_wheels(self) -> None:
"""
Prepares the wheels with native extensions that require
special treatment.
"""
self.__build_miniupnpc_universal()
self.__universal_repackage('coincurve')
self.__universal_repackage('py-ed25519-zebra-bindings')
def install_wheels(self, install: Callable[[str], None]) -> None:
"""
Installs the wheels that are patched or modified.
Note that the order of installation is important for cffi/coincurve.
If cffi is not installed first then a version will be pulled from PyPI instead.
:param install: The install callable that is passed externally
"""
if self.__environment.target_arch == 'universal2':
patched_pip = self.__download_patched_pip()
install(f'{patched_pip} --force-reinstall')
wheel_directory = self.__storage.wheel_directory
os.chdir(wheel_directory)
for wheel in sorted(wheel_directory.iterdir()):
install(str(wheel))
# To avoid cytoolz RuntimeError on macOS
# "Cython required to build dev version of cytoolz."
install('cython')
@log_group('certificates')
def import_signing_certificates(self) -> bool:
"""
Imports the signing certificates from the environment variables
and prepares the keychain for signing
"""
sign_vars = self.__environment.macos_sign_vars()
certificate = sign_vars.get('certificate')
csc_password = sign_vars.get('key')
if os.environ.get('CSC_LINK') is not None and csc_password is not None:
logger.info('CSC_LINK already set, skipping')
return True
if certificate is None:
logger.info(f'{MAC_CERTIFICATE} is not set, skipping signing')
return False
if csc_password is None:
logger.error(f'Missing {CERTIFICATE_KEY}')
sys.exit(1)
logger.info('preparing to sign macOS binary')
p12 = self.__p12
keychain = self.__keychain
os.environ.setdefault('CSC_LINK', p12)
with open(p12, 'wb') as file:
certificate_data = base64.b64decode(certificate)
file.write(certificate_data)
self.__default_keychain = subprocess.check_output(
'security default-keychain',
shell=True,
encoding='utf8',
).strip()
# Create a keychain
subprocess.call(f'security create-keychain -p actions {keychain}', shell=True)
# Make the keychain the default so identities are found
subprocess.call(f'security default-keychain -s {keychain}', shell=True)
# Unlock the keychains
subprocess.call(f'security unlock-keychain -p actions {keychain}', shell=True)
subprocess.call(
f'security import {p12} -k {keychain} -P {csc_password} -T /usr/bin/codesign;',
shell=True,
)
subprocess.call(
f'security set-key-partition-list -S apple-tool:,apple:,codesign:,productbuild: -s -k actions {keychain}', # noqa: E501
shell=True,
)
return True
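# Restores the previous default keychain, deletes the temporary certificate and unsets CSC_LINK.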
def cleanup_keychain(self) -> None:
default_keychain = self.__default_keychain
if default_keychain is not None:
subprocess.call(f'security default-keychain -s {default_keychain}', shell=True)
temp_certificate = Path(self.__p12)
if temp_certificate.exists():
temp_certificate.unlink(missing_ok=True)
os.environ.pop('CSC_LINK', None)
@log_group('signing')
def sign(self, paths: Generator[Path, None, None]) -> None:
"""
Signs all the contents of the directory created by PyInstaller
with the provided signing key/identity.
"""
if not self.import_signing_certificates():
return
identity = os.environ.get('IDENTITY')
for path in paths:
if not path.is_file():
continue
logger.debug(f'Preparing to sign {path}')
sign_ret_code = subprocess.call(
f'codesign --force --options runtime --entitlements ./packaging/entitlements.plist --sign {identity} {path} --timestamp', # noqa: E501
shell=True,
)
if sign_ret_code != 0:
logger.error(f'could not sign file {path}')
sys.exit(1)
verify_ret_code = subprocess.call(f'codesign --verify {path}', shell=True)
if verify_ret_code != 0:
logger.error(f'signature verification failed at {path}')
sys.exit(1)
self.cleanup_keychain()
@log_group('zip')
def zip(self) -> None:
"""
Creates a zip from the directory that contains the backend, checksums it
and moves them to the dist/ directory.
"""
backend_directory = self.__storage.backend_directory
os.chdir(backend_directory)
zip_filename = f'{BACKEND_PREFIX}-{self.__environment.rotki_version}-macos.zip'
ret_code = subprocess.call(
f'zip -vr "{zip_filename}" {BACKEND_PREFIX}/ -x "*.DS_Store"',
shell=True,
)
if ret_code != 0:
logger.error('zip failed')
sys.exit(1)
zip_file = backend_directory / zip_filename
checksum_file = Checksum.generate(self.__environment, zip_file)
self.__storage.move_to_dist(zip_file)
self.__storage.move_to_dist(checksum_file)
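# Orchestrates the backend build: installs the (possibly patched) wheels, builds the
# PyInstaller bundle and the Rust binary, runs sanity checks and moves the artifacts to dist/.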
class BackendBuilder:
def __init__(
self,
storage: Storage,
env: Environment,
mac: Optional[MacPackaging],
win: Optional[WindowsPackaging],
) -> None:
self.__mac = mac
self.__win = win
self.__storage = storage
self.__env = env
def clean(self) -> None:
storage = self.__storage
os.chdir(storage.working_directory)
if not self.__env.is_windows():
clean_result = subprocess.call('make clean', shell=True)
if clean_result != 0:
logger.error('failed to make clean')
sys.exit(1)
else:
if storage.temporary_directory.exists():
shutil.rmtree(storage.temporary_directory)
if storage.backend_directory.exists():
shutil.rmtree(storage.backend_directory)
if storage.dist_directory.exists():
shutil.rmtree(storage.dist_directory)
@log_group('pip install')
def pip_install(self, what: str, use_pep_517: bool = True) -> None:
"""
Calls pip install using subprocess.
:param what: anything that goes after pip install
:param use_pep_517: when False, --no-use-pep517 is passed to pip install
"""
base_command = 'pip install '
if use_pep_517 is False:
base_command += '--no-use-pep517 '
ret_code = subprocess.call(
f'{base_command} {what}',
shell=True,
cwd=self.__storage.working_directory,
)
if ret_code != 0:
logger.error(f'could not run "pip install {what}"')
sys.exit(1)
def __build_pyinstaller_bootloader(self, tag_version: str) -> None:
"""
Clones PyInstaller from source, checks out a specific version
and builds the bootloader.
This is required for architectures other than x86_64 that do not
have prebuilt bootloaders.
:param tag_version: The version of PyInstaller to check out
"""
build_directory = self.__storage.build_directory
build_directory.mkdir(exist_ok=True)
os.chdir(build_directory)
git_clone_ret_code = subprocess.call(
'git clone https://github.com/pyinstaller/pyinstaller.git',
shell=True,
)
if git_clone_ret_code != 0:
logger.error('could not clone pyinstaller')
sys.exit(1)
pyinstaller_directory = build_directory / 'pyinstaller'
os.chdir(pyinstaller_directory)
checkout_ret_code = subprocess.call(f'git checkout v{tag_version}', shell=True)
if checkout_ret_code != 0:
logger.error(f'failed to checkout pyinstaller v{tag_version} tag')
sys.exit(1)
bootloader_directory = pyinstaller_directory / 'bootloader'
os.chdir(bootloader_directory)
flag = '--no-universal2' if self.__env.target_arch != 'universal2' else '--universal2'
build_ret_code = subprocess.call(f'./waf all {flag}', shell=True)
if build_ret_code != 0:
logger.error(f'failed to build pyinstaller bootloader for {flag}')
sys.exit(1)
os.chdir(pyinstaller_directory)
install_ret_code = subprocess.call('pip install .', shell=True)
if install_ret_code != 0:
logger.error('failed to install pyinstaller')
sys.exit(1)
def __sanity_check(self) -> None:
os.chdir(self.__storage.working_directory)
ret_code = subprocess.call(
'python -c "import sys;from rotkehlchen.db.misc import detect_sqlcipher_version; version = detect_sqlcipher_version();sys.exit(0) if version == 4 else sys.exit(1)"', # noqa: E501
shell=True,
)
# Due to https://github.com/rotki/pysqlcipher3/issues/1 verification might
# fail on macOS machines where OpenSSL is not properly set up in the path.
# In this case we can set SKIP_SQLCIPHER_VERIFICATION to unblock the script,
# since the error does not affect runtime.
if ret_code != 0 and os.environ.get('SKIP_SQLCIPHER_VERIFICATION') is None:
logger.error('could not verify sqlcipher v4')
sys.exit(1)
def __move_to_dist(self) -> None:
"""
Generates a checksum for the backend and moves it along with a copy of the backend
to the dist/ directory. The backend file is copied instead of moved because the original
will be needed for electron-builder.
"""
backend_directory = self.__storage.backend_directory
os.chdir(backend_directory)
filename = f'{BACKEND_PREFIX}-{self.__env.rotki_version}-{self.__env.backend_suffix()}'
file = backend_directory / filename
checksum_file = Checksum.generate(self.__env, file)
self.__storage.copy_to_dist(file)
self.__storage.move_to_dist(checksum_file)
def _move_colibri_to_dist(self) -> None:
"""Move the colibri binary to dist"""
colibri_dir = self.__storage.colibri_directory
self.__storage.copy_to_dist(colibri_dir / 'bin', sub_dir='colibri')
@log_group('backend_build')
def build(self) -> None:
"""
Packages the backend using PyInstaller and creates the Rust binary.
"""
# When packaging on macOS one of the dependencies will try to access
# GITHUB_REF during pip install and will throw an error. For this reason
# the variable is temporarily removed and then restored.
github_ref = os.environ.get('GITHUB_REF')
os.environ.pop('GITHUB_REF', None)
self.__env.sanity_check()
mac = self.__mac
if mac is not None and self.__env.is_universal2():
logger.info('Doing preparation for universal2 wheels')
mac.prepare_wheels()
mac.install_wheels(self.pip_install)
win = self.__win
if win is not None:
win.setup_miniupnpc()
os.chdir(self.__storage.working_directory)
# This flag only works with the patched version of pip.
# https://github.com/kelsos/pip/tree/patched
os.environ.setdefault('PIP_FORCE_MACOS_UNIVERSAL2', '1')
self.pip_install('.', use_pep_517=True)
os.environ.pop('PIP_FORCE_MACOS_UNIVERSAL2', None)
if github_ref is not None:
os.environ.setdefault('GITHUB_REF', github_ref)
self.__create_rust_binary()
self.__install_pyinstaller()
self.__sanity_check()
self.__package()