@@ -1805,6 +1805,7 @@ def run(
         service_account: Optional[str] = None,
         bigquery_destination: Optional[str] = None,
         args: Optional[List[Union[str, float, int]]] = None,
+        environment_variables: Optional[Dict[str, str]] = None,
         replica_count: int = 0,
         machine_type: str = "n1-standard-4",
         accelerator_type: str = "ACCELERATOR_TYPE_UNSPECIFIED",
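The hunk above adds an environment_variables keyword to run(). A minimal usage sketch under assumed surroundings follows; the CustomTrainingJob constructor arguments (display name, script path, container image) are illustrative placeholders, not values taken from this change.

    from google.cloud import aiplatform

    # Hypothetical job definition; display_name, script_path and container_uri
    # are illustrative placeholders.
    job = aiplatform.CustomTrainingJob(
        display_name="env-var-demo",
        script_path="task.py",
        container_uri="gcr.io/cloud-aiplatform/training/tf-cpu.2-2:latest",
    )

    # Per-run environment variables for the training container
    # (at most 10, per the docstring added later in this diff).
    job.run(
        replica_count=1,
        machine_type="n1-standard-4",
        environment_variables={"MY_KEY": "MY_VALUE"},
    )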
@@ -1880,6 +1881,13 @@ def run(
             base_output_dir (str):
                 GCS output directory of job. If not provided a
                 timestamped directory in the staging directory will be used.
+
+                AI Platform sets the following environment variables when it runs your training code:
+
+                - AIP_MODEL_DIR: a Cloud Storage URI of a directory intended for saving model artifacts, i.e. <base_output_dir>/model/
+                - AIP_CHECKPOINT_DIR: a Cloud Storage URI of a directory intended for saving checkpoints, i.e. <base_output_dir>/checkpoints/
+                - AIP_TENSORBOARD_LOG_DIR: a Cloud Storage URI of a directory intended for saving TensorBoard logs, i.e. <base_output_dir>/logs/
+
             service_account (str):
                 Specifies the service account for workload run-as account.
                 Users submitting jobs must have act-as permission on this run-as account.
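The docstring text added above describes the AIP_* directories derived from base_output_dir. Below is a minimal sketch of training code that could consume them; the fallback paths and the Keras model.save call are illustrative assumptions, not part of this change.

    import os

    # Set by the training service at runtime, per the docstring above;
    # fall back to local paths when running outside AI Platform.
    model_dir = os.environ.get("AIP_MODEL_DIR", "/tmp/model")
    checkpoint_dir = os.environ.get("AIP_CHECKPOINT_DIR", "/tmp/checkpoints")
    tensorboard_dir = os.environ.get("AIP_TENSORBOARD_LOG_DIR", "/tmp/logs")

    # ...train, then write artifacts where the service expects them,
    # e.g. model.save(model_dir) for a Keras model (illustrative only).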
@@ -1900,6 +1908,16 @@ def run(
                 - AIP_TEST_DATA_URI = "bigquery_destination.dataset_*.test"
             args (List[Unions[str, int, float]]):
                 Command line arguments to be passed to the Python script.
+            environment_variables (Dict[str, str]):
+                Environment variables to be passed to the container.
+                Should be a dictionary where keys are environment variable names
+                and values are environment variable values for those names.
+                At most 10 environment variables can be specified.
+                The name of the environment variable must be unique.
+
+                environment_variables = {
+                    'MY_KEY': 'MY_VALUE'
+                }
             replica_count (int):
                 The number of worker replicas. If replica count = 1 then one chief
                 replica will be provisioned. If replica_count > 1 the remainder will be
@@ -1960,6 +1978,7 @@ def run(
             worker_pool_specs=worker_pool_specs,
             managed_model=managed_model,
             args=args,
+            environment_variables=environment_variables,
             base_output_dir=base_output_dir,
             service_account=service_account,
             bigquery_destination=bigquery_destination,
@@ -1986,6 +2005,7 @@ def _run(
         worker_pool_specs: _DistributedTrainingSpec,
         managed_model: Optional[gca_model.Model] = None,
         args: Optional[List[Union[str, float, int]]] = None,
+        environment_variables: Optional[Dict[str, str]] = None,
         base_output_dir: Optional[str] = None,
         service_account: Optional[str] = None,
         bigquery_destination: Optional[str] = None,
@@ -2018,9 +2038,26 @@ def _run(
                 Model proto if this script produces a Managed Model.
             args (List[Unions[str, int, float]]):
                 Command line arguments to be passed to the Python script.
+            environment_variables (Dict[str, str]):
+                Environment variables to be passed to the container.
+                Should be a dictionary where keys are environment variable names
+                and values are environment variable values for those names.
+                At most 10 environment variables can be specified.
+                The name of the environment variable must be unique.
+
+                environment_variables = {
+                    'MY_KEY': 'MY_VALUE'
+                }
             base_output_dir (str):
                 GCS output directory of job. If not provided a
                 timestamped directory in the staging directory will be used.
+
+                AI Platform sets the following environment variables when it runs your training code:
+
+                - AIP_MODEL_DIR: a Cloud Storage URI of a directory intended for saving model artifacts, i.e. <base_output_dir>/model/
+                - AIP_CHECKPOINT_DIR: a Cloud Storage URI of a directory intended for saving checkpoints, i.e. <base_output_dir>/checkpoints/
+                - AIP_TENSORBOARD_LOG_DIR: a Cloud Storage URI of a directory intended for saving TensorBoard logs, i.e. <base_output_dir>/logs/
+
             service_account (str):
                 Specifies the service account for workload run-as account.
                 Users submitting jobs must have act-as permission on this run-as account.
@@ -2083,6 +2120,9 @@ def _run(
         if args:
             spec["pythonPackageSpec"]["args"] = args
 
+        if environment_variables:
+            spec["pythonPackageSpec"]["env"] = environment_variables
+
         (
             training_task_inputs,
             base_output_dir,
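For orientation, the wiring above means a pythonPackageSpec that previously carried only args now also carries the env mapping. The fragment below is an assumed illustration of that shape; the argument values are placeholders, and only the keys visible in the hunk are taken from the diff.

    # Illustrative spec fragment after _run() attaches the new field.
    spec = {
        "pythonPackageSpec": {
            "args": ["--epochs", "10"],
            "env": {"MY_KEY": "MY_VALUE"},
        },
    }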
@@ -2334,6 +2374,7 @@ def run(
         service_account: Optional[str] = None,
         bigquery_destination: Optional[str] = None,
         args: Optional[List[Union[str, float, int]]] = None,
+        environment_variables: Optional[Dict[str, str]] = None,
         replica_count: int = 0,
         machine_type: str = "n1-standard-4",
         accelerator_type: str = "ACCELERATOR_TYPE_UNSPECIFIED",
@@ -2402,6 +2443,13 @@ def run(
             base_output_dir (str):
                 GCS output directory of job. If not provided a
                 timestamped directory in the staging directory will be used.
+
+                AI Platform sets the following environment variables when it runs your training code:
+
+                - AIP_MODEL_DIR: a Cloud Storage URI of a directory intended for saving model artifacts, i.e. <base_output_dir>/model/
+                - AIP_CHECKPOINT_DIR: a Cloud Storage URI of a directory intended for saving checkpoints, i.e. <base_output_dir>/checkpoints/
+                - AIP_TENSORBOARD_LOG_DIR: a Cloud Storage URI of a directory intended for saving TensorBoard logs, i.e. <base_output_dir>/logs/
+
             service_account (str):
                 Specifies the service account for workload run-as account.
                 Users submitting jobs must have act-as permission on this run-as account.
@@ -2422,6 +2470,16 @@ def run(
                 - AIP_TEST_DATA_URI = "bigquery_destination.dataset_*.test"
             args (List[Unions[str, int, float]]):
                 Command line arguments to be passed to the Python script.
+            environment_variables (Dict[str, str]):
+                Environment variables to be passed to the container.
+                Should be a dictionary where keys are environment variable names
+                and values are environment variable values for those names.
+                At most 10 environment variables can be specified.
+                The name of the environment variable must be unique.
+
+                environment_variables = {
+                    'MY_KEY': 'MY_VALUE'
+                }
             replica_count (int):
                 The number of worker replicas. If replica count = 1 then one chief
                 replica will be provisioned. If replica_count > 1 the remainder will be
@@ -2481,6 +2539,7 @@ def run(
             worker_pool_specs=worker_pool_specs,
             managed_model=managed_model,
             args=args,
+            environment_variables=environment_variables,
             base_output_dir=base_output_dir,
             service_account=service_account,
             bigquery_destination=bigquery_destination,
@@ -2506,6 +2565,7 @@ def _run(
         worker_pool_specs: _DistributedTrainingSpec,
         managed_model: Optional[gca_model.Model] = None,
         args: Optional[List[Union[str, float, int]]] = None,
+        environment_variables: Optional[Dict[str, str]] = None,
         base_output_dir: Optional[str] = None,
         service_account: Optional[str] = None,
         bigquery_destination: Optional[str] = None,
@@ -2535,9 +2595,26 @@ def _run(
                 Model proto if this script produces a Managed Model.
             args (List[Unions[str, int, float]]):
                 Command line arguments to be passed to the Python script.
+            environment_variables (Dict[str, str]):
+                Environment variables to be passed to the container.
+                Should be a dictionary where keys are environment variable names
+                and values are environment variable values for those names.
+                At most 10 environment variables can be specified.
+                The name of the environment variable must be unique.
+
+                environment_variables = {
+                    'MY_KEY': 'MY_VALUE'
+                }
             base_output_dir (str):
                 GCS output directory of job. If not provided a
                 timestamped directory in the staging directory will be used.
+
+                AI Platform sets the following environment variables when it runs your training code:
+
+                - AIP_MODEL_DIR: a Cloud Storage URI of a directory intended for saving model artifacts, i.e. <base_output_dir>/model/
+                - AIP_CHECKPOINT_DIR: a Cloud Storage URI of a directory intended for saving checkpoints, i.e. <base_output_dir>/checkpoints/
+                - AIP_TENSORBOARD_LOG_DIR: a Cloud Storage URI of a directory intended for saving TensorBoard logs, i.e. <base_output_dir>/logs/
+
             service_account (str):
                 Specifies the service account for workload run-as account.
                 Users submitting jobs must have act-as permission on this run-as account.
@@ -2593,6 +2670,9 @@ def _run(
         if args:
             spec["containerSpec"]["args"] = args
 
+        if environment_variables:
+            spec["containerSpec"]["env"] = environment_variables
+
         (
             training_task_inputs,
             base_output_dir,
@@ -3625,6 +3705,7 @@ def run(
         service_account: Optional[str] = None,
         bigquery_destination: Optional[str] = None,
         args: Optional[List[Union[str, float, int]]] = None,
+        environment_variables: Optional[Dict[str, str]] = None,
         replica_count: int = 0,
         machine_type: str = "n1-standard-4",
         accelerator_type: str = "ACCELERATOR_TYPE_UNSPECIFIED",
@@ -3693,6 +3774,13 @@ def run(
             base_output_dir (str):
                 GCS output directory of job. If not provided a
                 timestamped directory in the staging directory will be used.
+
+                AI Platform sets the following environment variables when it runs your training code:
+
+                - AIP_MODEL_DIR: a Cloud Storage URI of a directory intended for saving model artifacts, i.e. <base_output_dir>/model/
+                - AIP_CHECKPOINT_DIR: a Cloud Storage URI of a directory intended for saving checkpoints, i.e. <base_output_dir>/checkpoints/
+                - AIP_TENSORBOARD_LOG_DIR: a Cloud Storage URI of a directory intended for saving TensorBoard logs, i.e. <base_output_dir>/logs/
+
             service_account (str):
                 Specifies the service account for workload run-as account.
                 Users submitting jobs must have act-as permission on this run-as account.
@@ -3713,6 +3801,16 @@ def run(
                 - AIP_TEST_DATA_URI = "bigquery_destination.dataset_*.test"
             args (List[Unions[str, int, float]]):
                 Command line arguments to be passed to the Python script.
+            environment_variables (Dict[str, str]):
+                Environment variables to be passed to the container.
+                Should be a dictionary where keys are environment variable names
+                and values are environment variable values for those names.
+                At most 10 environment variables can be specified.
+                The name of the environment variable must be unique.
+
+                environment_variables = {
+                    'MY_KEY': 'MY_VALUE'
+                }
             replica_count (int):
                 The number of worker replicas. If replica count = 1 then one chief
                 replica will be provisioned. If replica_count > 1 the remainder will be
@@ -3767,6 +3865,7 @@ def run(
             worker_pool_specs=worker_pool_specs,
             managed_model=managed_model,
             args=args,
+            environment_variables=environment_variables,
             base_output_dir=base_output_dir,
             service_account=service_account,
             training_fraction_split=training_fraction_split,
@@ -3792,6 +3891,7 @@ def _run(
         worker_pool_specs: _DistributedTrainingSpec,
         managed_model: Optional[gca_model.Model] = None,
         args: Optional[List[Union[str, float, int]]] = None,
+        environment_variables: Optional[Dict[str, str]] = None,
         base_output_dir: Optional[str] = None,
         service_account: Optional[str] = None,
         training_fraction_split: float = 0.8,
@@ -3822,9 +3922,26 @@ def _run(
                 Model proto if this script produces a Managed Model.
             args (List[Unions[str, int, float]]):
                 Command line arguments to be passed to the Python script.
+            environment_variables (Dict[str, str]):
+                Environment variables to be passed to the container.
+                Should be a dictionary where keys are environment variable names
+                and values are environment variable values for those names.
+                At most 10 environment variables can be specified.
+                The name of the environment variable must be unique.
+
+                environment_variables = {
+                    'MY_KEY': 'MY_VALUE'
+                }
             base_output_dir (str):
                 GCS output directory of job. If not provided a
                 timestamped directory in the staging directory will be used.
+
+                AI Platform sets the following environment variables when it runs your training code:
+
+                - AIP_MODEL_DIR: a Cloud Storage URI of a directory intended for saving model artifacts, i.e. <base_output_dir>/model/
+                - AIP_CHECKPOINT_DIR: a Cloud Storage URI of a directory intended for saving checkpoints, i.e. <base_output_dir>/checkpoints/
+                - AIP_TENSORBOARD_LOG_DIR: a Cloud Storage URI of a directory intended for saving TensorBoard logs, i.e. <base_output_dir>/logs/
+
             service_account (str):
                 Specifies the service account for workload run-as account.
                 Users submitting jobs must have act-as permission on this run-as account.
@@ -3866,6 +3983,9 @@ def _run(
         if args:
             spec["pythonPackageSpec"]["args"] = args
 
+        if environment_variables:
+            spec["pythonPackageSpec"]["env"] = environment_variables
+
         (
            training_task_inputs,
            base_output_dir,