#!/usr/bin/python
#
# Copyright 2016 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
r"""A script to build Unity packages without Unity.
This script enables plugins and assets created for Unity to be packaged into
a Unity package which can be loaded in Unity and supports the appropriate meta
data. The script takes a config file and the root directory of the assets
to be packed.
The config supports multiple Unity package definitions, and each contains
an inclusive set of files with wildcard support grouped by platform settings.
Example usage:
export_unity_package.py --config_file=exports.json \
--guids_file=guids.json \
--plugins_version="1.0.0" \
--assets_dir="/tmp/unityBundle"
The json file should have the following format:
{
"packages": [
{
# Name of the Unity package to export.
"name": "yourpackage.unitypackage",
# Whether this package should be exported for the sections enabled.
# If this is empty the package will always be built. If this
# specifies a list of sections it will only be built if the
# enabled_sections flag contains the enabled sections in this list.
"sections": ["some_section"],
# Files to import into the package.
"imports": [
{
# Whether this package should be exported for the sections
# enabled.
# If this is empty the package will always be built. If this
# specifies a list of sections it will only be built if the
# enabled_sections flag contains the enabled sections in
# this list.
"sections": ["some_section"],
# How / when to import (load) the file in Unity.
# * PluginImporter specifies the file should be imported as
# a C# DLL for the platforms specified by the "platforms"
# field.
# * DefaultImporter specifies that the file should be
# imported using Unity's default settings for the file
# type derived from the file extension.
# This field defaults to "DefaultImporter".
"importer": "PluginImporter",
# Platforms targeted when the PluginImporter is used.
# Can be a list containing any Unity platform name e.g:
# * Any: Meta platform that targets all platforms.
# * Editor: Unity editor.
# * Standalone: Meta platform that targets all desktop
# platforms including the editor.
# * Android
# * iOS
# * tvOS
"platforms": ["Editor", "Standalone", "Android", "iOS"],
# CPUs supported by standalone or editor platforms when the
# "PluginImporter" is the importer and platforms contains
# one of "Standalone", "LinuxUniversal", "OSXUniversal"
# or "Editor".
"cpu": "AnyCPU", # (or "x86" or "x86_64")
# Labels to apply to the asset. These are used to find
# assets quickly in the asset database and change import
# setting via plugins like the Play Services Resolver.
"labels": [
"gvh",
...
],
# Asset metadata YAML to override the existing metadata for
# the file. This should either be a string containing YAML
# or a JSON dictionary.
# For example, the following uses a JSON dictionary to
# disable the plugin for the "Any" platform.
"override_metadata": {
"PluginImporter": {
"platformData": {
"Any": {
"enabled": 0
}
}
}
},
# Asset metadata YAML to override the existing metadata for
# the file for Unity Package Manager package. This should
# either be a string containing YAML or a JSON dictionary.
# For example, the following uses a JSON dictionary to
# enable the plugin for the "Editor" platform for UPM
# package.
"override_metadata_upm": {
"PluginImporter": {
"platformData": {
"Editor": {
"enabled": 1
}
}
}
},
# Files to import with the importer and label settings
# applied.
# Each item in this list can be one of the following:
# - Filename: Includes just this file.
# - Directory: Recursively includes the directory.
# - Unix shell-style wildcard (glob): Includes all files
# matching the pattern.
"paths": [
"Firebase/Plugins/App.dll",
...
]
},
...
],
# Transitively includes all files from the set of packages specified
# by this list.
"includes": [ "anotherpackage.unitypackage" ],
# List of regular expression strings which exclude files included
# in this plugin. This applies to this plugin if it's exported and
# all plugins that depend upon it.
"exclude_paths": [
"Firebase/Samples/Auth/.*",
],
# Whether to export this package (enabled by default).
"export": 1,
# Path of the manifest in the package with the basename of the
# manifest file. If a path isn't specified, a manifest isn't
# generated.
# e.g
# My/Cool/ShaderToolkit
# would be expanded to...
# My/Cool/${package_name}_v${version}_manifest.txt
#
# ${package_name} is derived from the output filename and
# ${version} is specified via the command line --plugins_version
# argument.
"manifest_path": "Firebase/Editor/FirebaseAnalytics",
# Path to the readme document. The file must be included through
# FLAGS.assets_dir, FLAGS.assets_zip or FLAGS.asset_file, and is not
# required to be in the "imports" section.
"readme": "path/to/a/Readme.md",
# Path to the changelog document. The file must be included through
# FLAGS.assets_dir, FLAGS.assets_zip or FLAGS.asset_file, and is not
# required to be in the "imports" section.
"changelog": "path/to/a/Changelog.md",
# Path to the license document. The file must be included through
# FLAGS.assets_dir, FLAGS.assets_zip or FLAGS.asset_file, and is not
# required to be in the "imports" section.
"license": "path/to/a/License.md",
# Path to the documentation. The path can be a specific file or a folder
# containing index.md. The file/folder must be included through
# FLAGS.assets_dir, FLAGS.assets_zip or FLAGS.asset_file, and is not
# required to be in the "imports" section.
"documentation": "path/to/a/Document.md",
# Common package information used to generate package manifest.
# Required if "export_upm" is 1
"common_manifest": {
# Package name used in the manifest file. Required if
# "export_upm" is 1.
"name": "com.google.firebase.app",
# Display name for the package. Optional.
"display_name": "Firebase App (Core)",
# Description for the package. Optional.
# This can be a single string or a list of strings which will be
# joined into a single string for the manifest.
"description": "This is core library for Firebase",
"description": [ "This is core library ", "for Firebase" ],
# A list of keywords for the package. Potentially used for
# filtering or searching. Optional.
# Add "vh-name:legacy_manifest_name" to link this package to
# a renamed package imported as an asset package.
# Note that this script will automatically add
# "vh-name:current_package_name" to keywords.
"keywords": [ "Google", "Firebase", "vh-name:MyOldName"],
# Author information for the package. Optional.
"author": {
"name" : "Google Inc",
"email" : "[email protected]",
"url": "https://firebase.google.com/"
}
},
# Whether to export this package for Unity Package Manager, i.e.
# .tgz tarball (disabled by default)
"export_upm": 0,
# Package configuration for Unity Package Manager package. Optional.
"upm_package_config": {
# Manifest information for package.json used by Unity Package
# Manager. Optional.
"manifest" : {
# This defines the package's minimum supported Unity version
# in the form "major.minor", for example "2019.1". The
# minimum valid version here is "2017.1". Optional.
"unity": "2017.1",
# A map containing this package's additional dependencies
# where the keys are package names and the values are
# specific versions, e.g. "1.2.3". This script will also
# automatically includes packages listed in "includes", if
# it is set to export for UPM.
"dependencies": {
"com.some.third-party-package": "1.2.3"
}
}
},
},
...
],
# Optional build configurations for the project.
# All packages in the project are exported for each build configuration
# listed in this section.
"builds": [
{
# Name of this build config for logging purposes.
"name": "debug",
# Whether this build config should be executed for the sections enabled.
# If this is empty, it will always be executed.
"sections": ["debug"],
# Sections that should be enabled when exporting packages with this
# build config. This set of sections is added to the sections
# specified on the command line before packages are exported.
"enabled_sections": ["early_access"],
# List of regular expressions and replacement strings applied to
# package names before they're exported.
# For example:
# { "match": "foo(.*)\\.bar", "replacement": "foo\\1Other.bar" }
# Changes the package name "foo123.bar" to "foo123Other.bar".
"package_name_replacements": [
{
"match": "(.*)(\\.unitypackage)",
"replacement": "\\1EarlyAccess\\2"
},
]
},
...
]
}
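As a minimal end-to-end illustration (the package, file and path names below
are hypothetical, not part of any shipped config), a config that exports a
single editor-only DLL could look like:
{
  "packages": [
    {
      "name": "myplugin.unitypackage",
      "imports": [
        {
          "importer": "PluginImporter",
          "platforms": ["Editor"],
          "paths": ["MyPlugin/Editor/MyPlugin.dll"]
        }
      ]
    }
  ]
}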
"""
import collections
import copy
import glob
import gzip
import json
import os
import platform
import re
import shutil
import stat
import subprocess
import sys
import tarfile
import tempfile
import traceback
import zipfile
from absl import app
from absl import flags
from absl import logging
import packaging.version
import yaml
FLAGS = flags.FLAGS
flags.DEFINE_string("config_file", None, ("Config file that describes how to "
"pack the unity assets."))
flags.DEFINE_string("guids_file", None, "Json file with stable guids cache.")
flags.DEFINE_string("plugins_version", None, "Version of the plugins to "
"package.")
flags.DEFINE_boolean("use_tar", True, "Whether to use the tar command line "
"application, when available, to generate archives rather "
"than Python's tarfile module. NOTE: On macOS tar / gzip "
"generate Unity compatible but non-reproducible archives.")
flags.DEFINE_boolean(
"enforce_semver", True, "Whether to enforce semver (major.minor.patch) for"
"plugins_version. This is required to build UPM package.")
flags.DEFINE_multi_string("assets_dir", ".", "Directory containing assets to "
"package.")
flags.DEFINE_multi_string("assets_zip", None, "Zip files containing assets to "
"package.")
flags.DEFINE_multi_string("asset_file", None,
"File to copy in a directory to search for assets. "
"This is in the format "
"'input_filename:asset_filename' where "
"input_filename if the path to the file to copy and "
"asset_filename is the path to copy to in directory "
"to stage assets.")
flags.DEFINE_integer("timestamp", 1480838400, # 2016-12-04
"Timestamp to use for each file. "
"Set to 0 to use the current time.")
flags.DEFINE_string("owner", "root",
"Username of file owner in each generated package.")
flags.DEFINE_string("group", "root",
"Username of file group in each generated package.")
flags.DEFINE_string("output_dir", "output",
"Directory to write the resulting Unity package files.")
flags.DEFINE_string("output_zip", None, "Zip file to archive the output Unity "
"packages.")
flags.DEFINE_boolean(
"output_upm", False, "Whether output packages as tgz for"
"Unity Package Manager.")
flags.DEFINE_boolean("output_unitypackage", True, "Whether output packages as "
"asset packages.")
flags.DEFINE_multi_string("additional_file", None,
"Additional file in the format "
"'input_filename:output_filename', which copies the "
"specified input_filename to output_filename under "
"the output_dir. This can be used to store "
"additional files in the output directory or zip "
"file. If the ':output_filename' portion of the "
"argument isn't specified, the file will be written "
"to the same path as the specified input_filename "
"under the output_dir.")
flags.DEFINE_spaceseplist(
"enabled_sections", None,
("List of sections to include in the set of packages. "
"Package specifications that do not specify any sections are always "
"included."))
# Default metadata for all Unity 5.3+ assets.
DEFAULT_METADATA_TEMPLATE = collections.OrderedDict(
[("fileFormatVersion", 2),
("guid", None), # A unique GUID *must* be specified for all assets.
("labels", None), # Can optionally specific a list of asset label strings.
("timeCreated", 0)])
# A minimal set of Importer meta data.
#
# This importer is used if nothing more specific is needed and Unity can often
# infer the correct meta data based on the directory structure. This importer is
# used if the json import group's "importer" field is "DefaultImporter".
DEFAULT_IMPORTER_DATA = [("userData", None),
("assetBundleName", None),
("assetBundleVariant", None)]
DEFAULT_IMPORTER_METADATA_TEMPLATE = collections.OrderedDict(
[("DefaultImporter", collections.OrderedDict(DEFAULT_IMPORTER_DATA))])
DEFAULT_FOLDER_METADATA_TEMPLATE = collections.OrderedDict([
("folderAsset", True),
("DefaultImporter", collections.OrderedDict(DEFAULT_IMPORTER_DATA))
])
PLATFORM_SETTINGS_DISABLED = [("enabled", 0)]
DEFAULT_PLATFORM_SETTINGS_EMPTY_DISABLED = collections.OrderedDict(
PLATFORM_SETTINGS_DISABLED +
[("settings", {})])
DEFAULT_PLATFORM_SETTINGS_DISABLED = collections.OrderedDict(
PLATFORM_SETTINGS_DISABLED +
[("settings", collections.OrderedDict(
[("CPU", "AnyCPU")]))])
DEFAULT_PLATFORM_SETTINGS_EDITOR = collections.OrderedDict(
PLATFORM_SETTINGS_DISABLED +
[("settings", collections.OrderedDict(
[("CPU", "AnyCPU"),
("DefaultValueInitialized", True),
("OS", "AnyOS")]))])
# When desktop platforms are disabled Unity expects the CPU to be set to None.
DEFAULT_PLATFORM_SETTINGS_DISABLED_CPU_NONE = collections.OrderedDict(
PLATFORM_SETTINGS_DISABLED +
[("settings", collections.OrderedDict(
[("CPU", "None")]))])
DEFAULT_PLATFORM_SETTINGS_DISABLED_IOS = collections.OrderedDict(
PLATFORM_SETTINGS_DISABLED +
[("settings", collections.OrderedDict(
[("CompileFlags", None),
("FrameworkDependencies", None)]))])
DEFAULT_PLATFORM_SETTINGS_DISABLED_TVOS = collections.OrderedDict(
PLATFORM_SETTINGS_DISABLED +
[("settings", collections.OrderedDict(
[("CompileFlags", None),
("FrameworkDependencies", None)]))])
PLUGIN_IMPORTER_METADATA_TEMPLATE = collections.OrderedDict(
[("PluginImporter", collections.OrderedDict(
[("serializedVersion", 1),
("iconMap", {}),
("executionOrder", {}),
("isPreloaded", 0),
("platformData", collections.OrderedDict(
[("Android", copy.deepcopy(
DEFAULT_PLATFORM_SETTINGS_DISABLED)),
("Any", copy.deepcopy(
DEFAULT_PLATFORM_SETTINGS_EMPTY_DISABLED)),
("Editor", copy.deepcopy(
DEFAULT_PLATFORM_SETTINGS_EDITOR)),
("Linux", copy.deepcopy(
DEFAULT_PLATFORM_SETTINGS_DISABLED_CPU_NONE)),
("Linux64", copy.deepcopy(
DEFAULT_PLATFORM_SETTINGS_DISABLED_CPU_NONE)),
("LinuxUniversal", copy.deepcopy(
DEFAULT_PLATFORM_SETTINGS_DISABLED_CPU_NONE)),
("OSXIntel", copy.deepcopy(
DEFAULT_PLATFORM_SETTINGS_DISABLED_CPU_NONE)),
("OSXIntel64", copy.deepcopy(
DEFAULT_PLATFORM_SETTINGS_DISABLED_CPU_NONE)),
("OSXUniversal", copy.deepcopy(
DEFAULT_PLATFORM_SETTINGS_DISABLED_CPU_NONE)),
("Web", copy.deepcopy(
DEFAULT_PLATFORM_SETTINGS_EMPTY_DISABLED)),
("WebStreamed", copy.deepcopy(
DEFAULT_PLATFORM_SETTINGS_EMPTY_DISABLED)),
("Win", copy.deepcopy(
DEFAULT_PLATFORM_SETTINGS_DISABLED_CPU_NONE)),
("Win64", copy.deepcopy(
DEFAULT_PLATFORM_SETTINGS_DISABLED_CPU_NONE)),
("WindowsStoreApps", copy.deepcopy(
DEFAULT_PLATFORM_SETTINGS_DISABLED)),
("iOS", copy.deepcopy(
DEFAULT_PLATFORM_SETTINGS_DISABLED_IOS)),
("tvOS", copy.deepcopy(
DEFAULT_PLATFORM_SETTINGS_DISABLED_TVOS)),
]))
] + DEFAULT_IMPORTER_DATA))
])
# Map of platforms to targets.
# Unity 5.6+ metadata requires a tuple of (target, name) for each platform.
# This ignores targets like "Facebook" which overlap with more common targets
# like "Standalone".
PLATFORM_TARGET_BY_PLATFORM = {
"Any": "Any",
"Editor": "Editor",
"Android": "Android",
"Linux": "Standalone",
"Linux64": "Standalone",
"LinuxUniversal": "Standalone",
"OSXIntel": "Standalone",
"OSXIntel64": "Standalone",
"OSXUniversal": "Standalone",
"Web": "WebGL",
"WebStreamed": "",
"Win": "Standalone",
"Win64": "Standalone",
"WindowsStoreApps": "Windows Store Apps",
"iOS": "iPhone",
"tvOS": "tvOS",
}
# Alias for standalone platforms specified by the keys of
# CPU_BY_DESKTOP_PLATFORM.
STANDALONE_PLATFORM_ALIAS = "Standalone"
# Maps architecture specific platform selections to "universal"
# platforms. Universal in Unity doesn't really mean that it can target any
# architecture; instead it is a master flag that controls whether the asset is
# enabled for export.
ARCH_SPECIFIC_TO_UNIVERSAL_PLATFORM = {
"Linux": "LinuxUniversal",
"Linux64": "LinuxUniversal",
"OSXIntel": "OSXUniversal",
"OSXIntel64": "OSXUniversal",
}
# Set of supported platforms for each shared library extension.
PLATFORMS_BY_SHARED_LIBRARY_EXTENSION = {
".so": set(["Any", "Editor", "Linux", "Linux64", "LinuxUniversal"]),
".bundle": set(["Any", "Editor", "OSXIntel", "OSXIntel64", "OSXUniversal"]),
".dll": set(["Any", "Editor", "Win", "Win64"])
}
# Desktop platform to CPU mapping.
CPU_BY_DESKTOP_PLATFORM = {
"Linux": "x86",
"OSXIntel": "x86",
"Win": "x86",
"Linux64": "x86_64",
"OSXIntel64": "x86_64",
"Win64": "x86_64",
"LinuxUniversal": "AnyCPU",
"OSXUniversal": "AnyCPU",
}
# CPU to desktop platform mapping.
DESKTOP_PLATFORMS_BY_CPU = {
"x86": [p for p, c in CPU_BY_DESKTOP_PLATFORM.items() if c == "x86"],
"x86_64": [p for p, c in CPU_BY_DESKTOP_PLATFORM.items() if c == "x86_64"],
"AnyCPU": CPU_BY_DESKTOP_PLATFORM.keys(),
}
# Unity 5.6 and beyond modified the PluginImporter format such that platforms
# are enabled using a list of dictionaries with the keys "first" and "second"
# controlling platform settings. This constant matches the keys in entries of
# the PluginImporter.platformData list.
UNITY_5_6_PLATFORM_DATA_KEYS = ["first", "second"]
# Prefix for labels that are applied to files managed by the VersionHandler
# module.
VERSION_HANDLER_LABEL_PREFIX = "gvh"
# Prefix for version numbers in VersionHandler filenames and labels.
VERSION_HANDLER_VERSION_FIELD_PREFIX = "version-"
VERSION_HANDLER_MANIFEST_FIELD_PREFIX = "manifest"
# Separator for filenames and fields parsed by the VersionHandler.
VERSION_HANDLER_FIELD_SEPARATOR = "_"
# Prefix for labels that are applied to files managed by the Unity Package
# Manager module.
UPM_RESOLVER_LABEL_PREFIX = "gupmr"
UPM_RESOLVER_MANIFEST_FIELD_PREFIX = "manifest"
# Separator for filenames and fields parsed by the UPM Resolver.
UPM_RESOLVER_FIELD_SEPARATOR = "_"
# Prefix for latest labels that are applied to files managed by the
# VersionHandler module.
VERSION_HANDLER_PRESERVE_LABEL_PREFIX = "gvhp"
VERSION_HANDLER_PRESERVE_MANIFEST_NAME_FIELD_PREFIX = "manifestname-"
VERSION_HANDLER_PRESERVE_EXPORT_PATH_FIELD_PREFIX = "exportpath-"
VERSION_HANDLER_MANIFEST_TYPE_LEGACY = 0
VERSION_HANDLER_MANIFEST_TYPE_UPM = 1
# Prefix for canonical Linux library names.
VERSION_HANDLER_LINUXLIBNAME_FIELD_PREFIX = "linuxlibname-"
# Canonical prefix of Linux shared libraries.
LINUX_SHARED_LIBRARY_PREFIX = "lib"
# Extension used by Linux shared libraries.
LINUX_SHARED_LIBRARY_EXTENSION = ".so"
# Path relative to the "Assets" dir of native Linux plugin libraries.
LINUX_SHARED_LIBRARY_PATH = re.compile(
r"^Plugins/(x86|x86_64)/(.*{ext})".format(
ext=LINUX_SHARED_LIBRARY_EXTENSION.replace(".", r"\.")))
# Path components required for native desktop libraries.
SHARED_LIBRARY_PATH = re.compile(
r"(^|/)Plugins/(x86|x86_64)/(.*/|)[^/]+\.(so|dll|bundle)$")
# Prefix of the keywords to be added to UPM manifest to link to legacy manifest.
UPM_KEYWORDS_MANIFEST_PREFIX = "vh-name:"
# Everything in a Unity plugin - at the moment - lives under the Assets
# directory
ASSETS_DIRECTORY = "Assets"
# Extension for asset metadata files.
ASSET_METADATA_FILE_EXTENSION = ".meta"
# Valid version for asset package in form of major.minor.patch(-preview)
VALID_VERSION_RE = re.compile(r"^[0-9]+\.[0-9]+\.[0-9]+(-preview)?$")
# Documentation folder and filename for UPM package.
UPM_DOCUMENTATION_DIRECTORY = "Documentation~"
UPM_DOCUMENTATION_FILENAME = "index.md"
# String and unicode classes used to check types with safe_dict_get_value()
try:
unicode("") # See whether unicode class is available (Python < 3)
STR_OR_UNICODE = [str, unicode]
except NameError:
STR_OR_UNICODE = [str]
unicode = str # pylint: disable=redefined-builtin,invalid-name
def posix_path(path):
"""Convert path separators to POSIX style.
Args:
path: Path to convert.
Returns:
Path with POSIX separators, i.e / rather than \\.
"""
return path.replace('\\', '/')
class MissingGuidsError(Exception):
"""Raised when GUIDs are missing for input files in export_package().
Attributes:
missing_guid_paths: List of files missing GUIDs.
"""
def __init__(self, missing_guid_paths):
"""Initialize the instance.
Args:
missing_guid_paths: List of files missing GUIDs.
"""
self.missing_guid_paths = sorted(list(set(missing_guid_paths)))
super(MissingGuidsError, self).__init__(self.__str__())
def __str__(self):
"""Retrieves a description of this error."""
guids_file = FLAGS.guids_file if FLAGS.guids_file else ""
plugins_version = FLAGS.plugins_version if FLAGS.plugins_version else ""
return (("There were asset paths without a known guid. "
"generate guids for these assets:\n\n"
"{gen_guids} "
"--guids_file=\"{guids_file}\" "
"--version=\"{plugins_version}\" \"").format(
gen_guids=os.path.realpath(
os.path.join(os.path.dirname(__file__), "gen_guids.py")),
guids_file=guids_file, plugins_version=plugins_version) +
"\" \"".join(self.missing_guid_paths) + "\"")
class DuplicateGuidsError(Exception):
"""Raised when GUIDs are duplicated for multiple export paths.
Attributes:
paths_by_guid: GUIDs that have multiple paths associated with them.
"""
def __init__(self, paths_by_guid):
self.paths_by_guid = paths_by_guid
super(DuplicateGuidsError, self).__init__(self.__str__())
def __str__(self):
"""Retrieves a description of this error."""
return ("Found duplicate GUIDs that map to multiple paths.\n%s" %
"\n".join(["%s --> %s" % (guid, str(sorted(paths)))
for guid, paths in self.paths_by_guid.items()]))
class DuplicateGuidsChecker(object):
"""Ensures no duplicate GUIDs are present in the project.
Attributes:
_paths_by_guid: Set of file paths by GUID.
"""
def __init__(self):
"""Initialize this instance."""
self._paths_by_guid = collections.defaultdict(set)
def add_guid_and_path(self, guid, path):
"""Associate an export path with a GUID.
Args:
guid: GUID to add to this instance.
path: Path associated with this GUID.
"""
self._paths_by_guid[guid].add(posix_path(path))
def check_for_duplicates(self):
"""Check the set of GUIDs for duplicate paths.
Raises:
DuplicateGuidsError: If multiple paths are found for the same GUID.
"""
conflicting_paths_by_guid = dict(
[(guid, paths)
for guid, paths in self._paths_by_guid.items() if len(paths) > 1])
if conflicting_paths_by_guid:
raise DuplicateGuidsError(conflicting_paths_by_guid)
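# Illustrative usage of DuplicateGuidsChecker (the GUID and paths below are
# hypothetical):
#
#   checker = DuplicateGuidsChecker()
#   checker.add_guid_and_path("0123456789abcdef", "Foo/Bar.dll")
#   checker.add_guid_and_path("0123456789abcdef", "Foo/Baz.dll")
#   checker.check_for_duplicates()  # Raises DuplicateGuidsError because the
#                                   # GUID maps to two different paths.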
class YamlSerializer(object):
"""Loads and saves YAML files preserving the order of elements."""
class OrderedLoader(yaml.Loader):
"""Overrides the default YAML loader to construct nodes as OrderedDict."""
_initialized = False
@classmethod
def initialize(cls):
"""Installs the construct_mapping constructor on the Loader."""
if not cls._initialized:
cls.add_constructor(yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG,
cls._construct_mapping)
cls._initialized = True
@staticmethod
def _construct_mapping(loader, node):
"""Constructs an OrderedDict from a YAML node.
Args:
loader: yaml.Loader loading the file.
node: Node being mapped to a python data structure.
Returns:
OrderedDict for the YAML node.
"""
loader.flatten_mapping(node)
return collections.OrderedDict(loader.construct_pairs(node))
class OrderedDumper(yaml.Dumper):
"""Overrides the default YAML serializer.
By default, maps items to the OrderedDict structure, maps None to an empty
string and disables aliases.
"""
_initialized = False
@classmethod
def initialize(cls):
"""Installs the representers on this class."""
if not cls._initialized:
# By default map data structures to OrderedDict.
cls.add_representer(collections.OrderedDict, cls._represent_map)
# By default map None to empty strings.
cls.add_representer(type(None), cls._represent_none)
# By default map unicode to strings.
cls.add_representer(unicode, cls._represent_unicode)
cls._initialized = True
@staticmethod
def _represent_unicode(dumper, data):
"""Strip the unicode tag from a yaml dump.
Args:
dumper: Generates the mapping.
data: Data to generate the representer for.
Returns:
String mapping for unicode data.
"""
return dumper.represent_scalar(u"tag:yaml.org,2002:str", data)
@staticmethod
def _represent_map(dumper, data):
"""Return a default representer for a map.
Args:
dumper: Generates the mapping.
data: Data to generate a representer for.
Returns:
The default mapping for a map.
"""
return dumper.represent_mapping(
yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG, data.items())
@staticmethod
def _represent_none(dumper, unused_data):
"""Return a representer for None that emits an empty string.
Args:
dumper: Generates the mapping.
unused_data: Unused.
Returns:
A mapping that returns an empty string for None entries.
"""
return dumper.represent_scalar(u"tag:yaml.org,2002:null", "")
def ignore_aliases(self, unused_data):
"""Disable data structure aliases.
Returns:
True always.
"""
return True
def __init__(self, *unused_argv):
"""Create the serializer."""
YamlSerializer.OrderedLoader.initialize()
YamlSerializer.OrderedDumper.initialize()
def load(self, yaml_string):
"""Load yaml from a string into this class.
Args:
yaml_string: String to load YAML from.
Returns:
OrderedDict loaded from YAML.
"""
return yaml.load(yaml_string, Loader=YamlSerializer.OrderedLoader)
def dump(self, data):
"""Generate a YAML string from the data in this class.
Args:
data: Set of Python data structures to dump to YAML.
Returns:
YAML string representation of this class.
"""
return yaml.dump(data, Dumper=YamlSerializer.OrderedDumper,
default_flow_style=False)
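# A small usage sketch for YamlSerializer (the YAML content is hypothetical):
#
#   serializer = YamlSerializer()
#   metadata = serializer.load("guid: abc\nlabels:\n- gvh\n")
#   # Keys come back in an OrderedDict, so dumping preserves their order.
#   yaml_text = serializer.dump(metadata)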
def merge_ordered_dicts(merge_into, merge_from):
"""Merge ordered dicts.
Merge nodes of merge_from into merge_into.
- If a node exists in merge_into and merge_from and they're both a dictionary,
merge them together.
- If a node exists in merge_into and merge_from and both values are lists of
dictionaries where each dictionary contains the keys "first" and "second",
merge the lists using the value of "first" in each dictionary as the merge
key. This allows modification of the platform targeting data structure in
Unity asset metadata.
In all other cases, replace the node in merge_into with the value from
merge_from.
Args:
merge_into: OrderedDict instance to merge values into.
merge_from: OrderedDict instance to merge values from.
Returns:
Value of merge_into.
"""
def list_contains_dictionaries_with_keys(list_to_query, expected_keys):
"""Check a list for dictionaries with exactly the specified keys.
Args:
list_to_query: List to query.
expected_keys: Keys to search for in each dictionary in the list.
Returns:
True if the list contains dictionaries with exactly the specified
keys, False otherwise.
"""
list_matches = False
if issubclass(list_to_query.__class__, list):
list_matches = list_to_query and True
for item in list_to_query:
if not (issubclass(item.__class__, dict) and
sorted(item.keys()) == expected_keys):
list_matches = False
break
return list_matches
if (issubclass(merge_from.__class__, dict) and
issubclass(merge_into.__class__, dict)):
for merge_from_key, merge_from_value in merge_from.items():
merge_into_value = merge_into.get(merge_from_key)
if merge_into_value is not None:
if (issubclass(merge_into_value.__class__, dict) and
issubclass(merge_from_value.__class__, dict)):
merge_ordered_dicts(merge_into_value, merge_from_value)
continue
if (list_contains_dictionaries_with_keys(
merge_into_value, UNITY_5_6_PLATFORM_DATA_KEYS) and
list_contains_dictionaries_with_keys(
merge_from_value, UNITY_5_6_PLATFORM_DATA_KEYS)):
for merge_from_list_item in merge_from_value:
# Try finding the dictionary to merge based upon the hash of the
# "first" item value.
merged = None
key = str(merge_from_list_item["first"])
for merge_into_list_item in merge_into_value:
if str(merge_into_list_item["first"]) == key:
merge_ordered_dicts(merge_into_list_item, merge_from_list_item)
merged = merge_into_list_item
break
# If the dictionary wasn't merged, add it to the list.
if not merged:
merge_into_value.append(merge_from_list_item)
continue
merge_into[merge_from_key] = merge_from_value
return merge_into
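# For illustration, merging nested dictionaries (hypothetical values):
#
#   merge_ordered_dicts(
#       collections.OrderedDict(
#           [("PluginImporter",
#             collections.OrderedDict([("isPreloaded", 0)]))]),
#       {"PluginImporter": {"isPreloaded": 1}})
#   # Returns OrderedDict([("PluginImporter",
#   #                       OrderedDict([("isPreloaded", 1)]))])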
def safe_dict_get_value(tree_node, key, default_value=None, value_classes=None):
"""Safely retrieve a value from a node in a tree read from JSON or YAML.
The JSON and YAML parsers return nodes that can be container or non-container
types. This method internally checks the node to make sure it's a dictionary
before querying for the specified key. If a default value or value_classes are
specified, this method also ensures the returned value is derived from the
same type as default_value or one of value_classes.
Args:
tree_node: Node to query.
key: Key to retrieve from the node.
default_value: Default value if the key doesn't exist in the node or the
node isn't derived from dictionary.
value_classes: List of expected classes of the key value. If the returned
type does not match one of these classes the default value is returned.
If this is not specified, the class of the default_value is used instead.
Returns:
Value corresponding to key in the tree_node or default_value if the key does
not exist or doesn't match the expected class.
"""
if not issubclass(tree_node.__class__, dict):
return default_value
value = tree_node.get(key)
if value is None:
value = default_value
elif default_value is not None or value_classes:
if not value_classes:
value_classes = [default_value.__class__]
if default_value.__class__ == str:
value_classes.append(unicode)
matches_class = False
for value_class in value_classes:
if issubclass(value.__class__, value_class):
matches_class = True
break
if not matches_class:
logging.warning("Expected class %s instead of class %s while reading key "
"%s from %s. Will use value %s instead of %s.\n%s",
value_classes, value.__class__, key, tree_node,
default_value, value, "".join(traceback.format_stack()))
value = default_value
return value
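# For illustration (hypothetical data):
#
#   safe_dict_get_value({"labels": ["gvh"]}, "labels", default_value=[])
#   # Returns ["gvh"].
#   safe_dict_get_value({"labels": "gvh"}, "labels", default_value=[])
#   # Returns [] and logs a warning because the value isn't a list.
#   safe_dict_get_value("not-a-dict", "labels", default_value=[])
#   # Returns [] because the node isn't a dictionary.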
def safe_dict_set_value(tree_node, key, value):
"""Safely set a value to a node in a tree read from JSON or YAML.
The JSON and YAML parsers return nodes that can be container or non-container
types. This method internally checks the node to make sure it's a dictionary
before setting the specified key. If value is None, the key is removed from
tree_node instead.
Args:
tree_node: Node to set.
key: Key of the entry to be added to the node.
value: Value of the entry to be added to the node. If None, try to remove
the entry from tree_node.
Returns:
Value of tree_node.
"""
if not issubclass(tree_node.__class__, dict):
return tree_node
if value is None:
if key in tree_node:
del tree_node[key]
else:
tree_node[key] = value
return tree_node
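# For illustration (hypothetical data):
#
#   node = {"enabled": 0}
#   safe_dict_set_value(node, "enabled", 1)     # node is now {"enabled": 1}.
#   safe_dict_set_value(node, "enabled", None)  # Removes the key entirely.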
class GuidDatabase(object):
"""Reads GUIDs from .meta files and a GUID cache.
Attributes:
_guids_by_path: Cache of GUIDs by path.
_duplicate_guids_checker: Instance of DuplicateGuidsChecker to ensure no
duplicate GUIDs are present.
"""
def __init__(self, duplicate_guids_checker, guids_json,
plugin_version):
"""Initialize the database with data from the GUIDs database.
Args:
duplicate_guids_checker: Instance of DuplicateGuidsChecker.
guids_json: JSON dictionary that contains the GUIDs to search for.
See firebase/app/client/unity/gen_guids.py for the format.
This can be None to not initialize the database.
plugin_version: Version to use for GUID selection in the specified JSON.
"""
self._guids_by_path = {}
self._duplicate_guids_checker = duplicate_guids_checker
if guids_json:
guid_map = safe_dict_get_value(guids_json, plugin_version,
default_value={})
for filename, guid in guid_map.items():
self.add_guid(posix_path(filename), guid)
if plugin_version:
# Aggregate guids for older versions of files.
current_version = packaging.version.Version(plugin_version)
for version in sorted(guids_json, key=packaging.version.Version,
reverse=True):
# Skip all versions after and including the current version.
if packaging.version.Version(version) >= current_version:
continue
# Add all guids for files to the current version.
guids_by_filename = guids_json[version]
for filename in guids_by_filename:
if filename not in guid_map:
self.add_guid(filename, guids_by_filename[filename])
def add_guid(self, path, guid):
"""Add a GUID for the specified path to the guid_map and GUID checker.