Skip to content

Commit d364b14

Browse files
chore(api): update composite API spec
1 parent 7866f94 commit d364b14

12 files changed

Lines changed: 82 additions & 10 deletions

.stats.yml

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
11
configured_endpoints: 1826
2-
openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/cloudflare%2Fcloudflare-72ce892ddf4f0cfe8ab511c2f84e4f8612d839ba17e981262ccc0e29c7c4c5df.yml
3-
openapi_spec_hash: e93d4fcda4bcd5ec1a77ddb5782ffd78
2+
openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/cloudflare%2Fcloudflare-15982839c3e7b1be15bee16e97c5dcef8397120052303cf3fc3b2b75d9bee219.yml
3+
openapi_spec_hash: 6d2e235e75f6ce217b0b84c07c6f5aac
44
config_hash: 59212900ef3970ac3c41ca2537fabe7b

src/cloudflare/resources/bot_management.py

Lines changed: 28 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -52,6 +52,7 @@ def update(
5252
*,
5353
zone_id: str,
5454
ai_bots_protection: Literal["block", "disabled", "only_on_ad_pages"] | Omit = omit,
55+
cf_robots_variant: Literal["off", "policy_only"] | Omit = omit,
5556
crawler_protection: Literal["enabled", "disabled"] | Omit = omit,
5657
enable_js: bool | Omit = omit,
5758
fight_mode: bool | Omit = omit,
@@ -132,6 +133,8 @@ def update(
132133
ai_bots_protection: Enable rule to block AI Scrapers and Crawlers. Please note the value
133134
`only_on_ad_pages` is currently not available for Enterprise customers.
134135
136+
cf_robots_variant: Specifies the Robots Access Control License variant to use.
137+
135138
crawler_protection: Enable rule to punish AI Scrapers and Crawlers via a link maze.
136139
137140
enable_js: Use lightweight, invisible JavaScript detections to improve Bot Management.
@@ -158,6 +161,7 @@ def update(
158161
*,
159162
zone_id: str,
160163
ai_bots_protection: Literal["block", "disabled", "only_on_ad_pages"] | Omit = omit,
164+
cf_robots_variant: Literal["off", "policy_only"] | Omit = omit,
161165
crawler_protection: Literal["enabled", "disabled"] | Omit = omit,
162166
enable_js: bool | Omit = omit,
163167
is_robots_txt_managed: bool | Omit = omit,
@@ -241,6 +245,8 @@ def update(
241245
ai_bots_protection: Enable rule to block AI Scrapers and Crawlers. Please note the value
242246
`only_on_ad_pages` is currently not available for Enterprise customers.
243247
248+
cf_robots_variant: Specifies the Robots Access Control License variant to use.
249+
244250
crawler_protection: Enable rule to punish AI Scrapers and Crawlers via a link maze.
245251
246252
enable_js: Use lightweight, invisible JavaScript detections to improve Bot Management.
@@ -275,6 +281,7 @@ def update(
275281
*,
276282
zone_id: str,
277283
ai_bots_protection: Literal["block", "disabled", "only_on_ad_pages"] | Omit = omit,
284+
cf_robots_variant: Literal["off", "policy_only"] | Omit = omit,
278285
crawler_protection: Literal["enabled", "disabled"] | Omit = omit,
279286
enable_js: bool | Omit = omit,
280287
is_robots_txt_managed: bool | Omit = omit,
@@ -359,6 +366,8 @@ def update(
359366
ai_bots_protection: Enable rule to block AI Scrapers and Crawlers. Please note the value
360367
`only_on_ad_pages` is currently not available for Enterprise customers.
361368
369+
cf_robots_variant: Specifies the Robots Access Control License variant to use.
370+
362371
crawler_protection: Enable rule to punish AI Scrapers and Crawlers via a link maze.
363372
364373
enable_js: Use lightweight, invisible JavaScript detections to improve Bot Management.
@@ -397,6 +406,7 @@ def update(
397406
ai_bots_protection: Literal["block", "disabled", "only_on_ad_pages"] | Omit = omit,
398407
auto_update_model: bool | Omit = omit,
399408
bm_cookie_enabled: bool | Omit = omit,
409+
cf_robots_variant: Literal["off", "policy_only"] | Omit = omit,
400410
crawler_protection: Literal["enabled", "disabled"] | Omit = omit,
401411
enable_js: bool | Omit = omit,
402412
is_robots_txt_managed: bool | Omit = omit,
@@ -484,6 +494,8 @@ def update(
484494
bm_cookie_enabled: Indicates that the bot management cookie can be placed on end user devices
485495
accessing the site. Defaults to true
486496
497+
cf_robots_variant: Specifies the Robots Access Control License variant to use.
498+
487499
crawler_protection: Enable rule to punish AI Scrapers and Crawlers via a link maze.
488500
489501
enable_js: Use lightweight, invisible JavaScript detections to improve Bot Management.
@@ -511,6 +523,7 @@ def update(
511523
*,
512524
zone_id: str,
513525
ai_bots_protection: Literal["block", "disabled", "only_on_ad_pages"] | Omit = omit,
526+
cf_robots_variant: Literal["off", "policy_only"] | Omit = omit,
514527
crawler_protection: Literal["enabled", "disabled"] | Omit = omit,
515528
enable_js: bool | Omit = omit,
516529
fight_mode: bool | Omit = omit,
@@ -539,6 +552,7 @@ def update(
539552
body=maybe_transform(
540553
{
541554
"ai_bots_protection": ai_bots_protection,
555+
"cf_robots_variant": cf_robots_variant,
542556
"crawler_protection": crawler_protection,
543557
"enable_js": enable_js,
544558
"fight_mode": fight_mode,
@@ -638,6 +652,7 @@ async def update(
638652
*,
639653
zone_id: str,
640654
ai_bots_protection: Literal["block", "disabled", "only_on_ad_pages"] | Omit = omit,
655+
cf_robots_variant: Literal["off", "policy_only"] | Omit = omit,
641656
crawler_protection: Literal["enabled", "disabled"] | Omit = omit,
642657
enable_js: bool | Omit = omit,
643658
fight_mode: bool | Omit = omit,
@@ -718,6 +733,8 @@ async def update(
718733
ai_bots_protection: Enable rule to block AI Scrapers and Crawlers. Please note the value
719734
`only_on_ad_pages` is currently not available for Enterprise customers.
720735
736+
cf_robots_variant: Specifies the Robots Access Control License variant to use.
737+
721738
crawler_protection: Enable rule to punish AI Scrapers and Crawlers via a link maze.
722739
723740
enable_js: Use lightweight, invisible JavaScript detections to improve Bot Management.
@@ -744,6 +761,7 @@ async def update(
744761
*,
745762
zone_id: str,
746763
ai_bots_protection: Literal["block", "disabled", "only_on_ad_pages"] | Omit = omit,
764+
cf_robots_variant: Literal["off", "policy_only"] | Omit = omit,
747765
crawler_protection: Literal["enabled", "disabled"] | Omit = omit,
748766
enable_js: bool | Omit = omit,
749767
is_robots_txt_managed: bool | Omit = omit,
@@ -827,6 +845,8 @@ async def update(
827845
ai_bots_protection: Enable rule to block AI Scrapers and Crawlers. Please note the value
828846
`only_on_ad_pages` is currently not available for Enterprise customers.
829847
848+
cf_robots_variant: Specifies the Robots Access Control License variant to use.
849+
830850
crawler_protection: Enable rule to punish AI Scrapers and Crawlers via a link maze.
831851
832852
enable_js: Use lightweight, invisible JavaScript detections to improve Bot Management.
@@ -861,6 +881,7 @@ async def update(
861881
*,
862882
zone_id: str,
863883
ai_bots_protection: Literal["block", "disabled", "only_on_ad_pages"] | Omit = omit,
884+
cf_robots_variant: Literal["off", "policy_only"] | Omit = omit,
864885
crawler_protection: Literal["enabled", "disabled"] | Omit = omit,
865886
enable_js: bool | Omit = omit,
866887
is_robots_txt_managed: bool | Omit = omit,
@@ -945,6 +966,8 @@ async def update(
945966
ai_bots_protection: Enable rule to block AI Scrapers and Crawlers. Please note the value
946967
`only_on_ad_pages` is currently not available for Enterprise customers.
947968
969+
cf_robots_variant: Specifies the Robots Access Control License variant to use.
970+
948971
crawler_protection: Enable rule to punish AI Scrapers and Crawlers via a link maze.
949972
950973
enable_js: Use lightweight, invisible JavaScript detections to improve Bot Management.
@@ -983,6 +1006,7 @@ async def update(
9831006
ai_bots_protection: Literal["block", "disabled", "only_on_ad_pages"] | Omit = omit,
9841007
auto_update_model: bool | Omit = omit,
9851008
bm_cookie_enabled: bool | Omit = omit,
1009+
cf_robots_variant: Literal["off", "policy_only"] | Omit = omit,
9861010
crawler_protection: Literal["enabled", "disabled"] | Omit = omit,
9871011
enable_js: bool | Omit = omit,
9881012
is_robots_txt_managed: bool | Omit = omit,
@@ -1070,6 +1094,8 @@ async def update(
10701094
bm_cookie_enabled: Indicates that the bot management cookie can be placed on end user devices
10711095
accessing the site. Defaults to true
10721096
1097+
cf_robots_variant: Specifies the Robots Access Control License variant to use.
1098+
10731099
crawler_protection: Enable rule to punish AI Scrapers and Crawlers via a link maze.
10741100
10751101
enable_js: Use lightweight, invisible JavaScript detections to improve Bot Management.
@@ -1097,6 +1123,7 @@ async def update(
10971123
*,
10981124
zone_id: str,
10991125
ai_bots_protection: Literal["block", "disabled", "only_on_ad_pages"] | Omit = omit,
1126+
cf_robots_variant: Literal["off", "policy_only"] | Omit = omit,
11001127
crawler_protection: Literal["enabled", "disabled"] | Omit = omit,
11011128
enable_js: bool | Omit = omit,
11021129
fight_mode: bool | Omit = omit,
@@ -1125,6 +1152,7 @@ async def update(
11251152
body=await async_maybe_transform(
11261153
{
11271154
"ai_bots_protection": ai_bots_protection,
1155+
"cf_robots_variant": cf_robots_variant,
11281156
"crawler_protection": crawler_protection,
11291157
"enable_js": enable_js,
11301158
"fight_mode": fight_mode,

src/cloudflare/types/bot_management/bot_fight_mode_configuration.py

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -42,6 +42,9 @@ class BotFightModeConfiguration(BaseModel):
4242
Enterprise customers.
4343
"""
4444

45+
cf_robots_variant: Optional[Literal["off", "policy_only"]] = None
46+
"""Specifies the Robots Access Control License variant to use."""
47+
4548
crawler_protection: Optional[Literal["enabled", "disabled"]] = None
4649
"""Enable rule to punish AI Scrapers and Crawlers via a link maze."""
4750

src/cloudflare/types/bot_management/bot_fight_mode_configuration_param.py

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -15,6 +15,9 @@ class BotFightModeConfigurationParam(TypedDict, total=False):
1515
Enterprise customers.
1616
"""
1717

18+
cf_robots_variant: Literal["off", "policy_only"]
19+
"""Specifies the Robots Access Control License variant to use."""
20+
1821
crawler_protection: Literal["enabled", "disabled"]
1922
"""Enable rule to punish AI Scrapers and Crawlers via a link maze."""
2023

src/cloudflare/types/bot_management/bot_management_update_params.py

Lines changed: 12 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -25,6 +25,9 @@ class BotFightModeConfiguration(TypedDict, total=False):
2525
Enterprise customers.
2626
"""
2727

28+
cf_robots_variant: Literal["off", "policy_only"]
29+
"""Specifies the Robots Access Control License variant to use."""
30+
2831
crawler_protection: Literal["enabled", "disabled"]
2932
"""Enable rule to punish AI Scrapers and Crawlers via a link maze."""
3033

@@ -56,6 +59,9 @@ class SuperBotFightModeDefinitelyConfiguration(TypedDict, total=False):
5659
Enterprise customers.
5760
"""
5861

62+
cf_robots_variant: Literal["off", "policy_only"]
63+
"""Specifies the Robots Access Control License variant to use."""
64+
5965
crawler_protection: Literal["enabled", "disabled"]
6066
"""Enable rule to punish AI Scrapers and Crawlers via a link maze."""
6167

@@ -100,6 +106,9 @@ class SuperBotFightModeLikelyConfiguration(TypedDict, total=False):
100106
Enterprise customers.
101107
"""
102108

109+
cf_robots_variant: Literal["off", "policy_only"]
110+
"""Specifies the Robots Access Control License variant to use."""
111+
103112
crawler_protection: Literal["enabled", "disabled"]
104113
"""Enable rule to punish AI Scrapers and Crawlers via a link maze."""
105114

@@ -160,6 +169,9 @@ class SubscriptionConfiguration(TypedDict, total=False):
160169
accessing the site. Defaults to true
161170
"""
162171

172+
cf_robots_variant: Literal["off", "policy_only"]
173+
"""Specifies the Robots Access Control License variant to use."""
174+
163175
crawler_protection: Literal["enabled", "disabled"]
164176
"""Enable rule to punish AI Scrapers and Crawlers via a link maze."""
165177

src/cloudflare/types/bot_management/subscription_configuration.py

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -55,6 +55,9 @@ class SubscriptionConfiguration(BaseModel):
5555
accessing the site. Defaults to true
5656
"""
5757

58+
cf_robots_variant: Optional[Literal["off", "policy_only"]] = None
59+
"""Specifies the Robots Access Control License variant to use."""
60+
5861
crawler_protection: Optional[Literal["enabled", "disabled"]] = None
5962
"""Enable rule to punish AI Scrapers and Crawlers via a link maze."""
6063

src/cloudflare/types/bot_management/subscription_configuration_param.py

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -28,6 +28,9 @@ class SubscriptionConfigurationParam(TypedDict, total=False):
2828
accessing the site. Defaults to true
2929
"""
3030

31+
cf_robots_variant: Literal["off", "policy_only"]
32+
"""Specifies the Robots Access Control License variant to use."""
33+
3134
crawler_protection: Literal["enabled", "disabled"]
3235
"""Enable rule to punish AI Scrapers and Crawlers via a link maze."""
3336

src/cloudflare/types/bot_management/super_bot_fight_mode_definitely_configuration.py

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -27,6 +27,9 @@ class SuperBotFightModeDefinitelyConfiguration(BaseModel):
2727
Enterprise customers.
2828
"""
2929

30+
cf_robots_variant: Optional[Literal["off", "policy_only"]] = None
31+
"""Specifies the Robots Access Control License variant to use."""
32+
3033
crawler_protection: Optional[Literal["enabled", "disabled"]] = None
3134
"""Enable rule to punish AI Scrapers and Crawlers via a link maze."""
3235

src/cloudflare/types/bot_management/super_bot_fight_mode_definitely_configuration_param.py

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -15,6 +15,9 @@ class SuperBotFightModeDefinitelyConfigurationParam(TypedDict, total=False):
1515
Enterprise customers.
1616
"""
1717

18+
cf_robots_variant: Literal["off", "policy_only"]
19+
"""Specifies the Robots Access Control License variant to use."""
20+
1821
crawler_protection: Literal["enabled", "disabled"]
1922
"""Enable rule to punish AI Scrapers and Crawlers via a link maze."""
2023

src/cloudflare/types/bot_management/super_bot_fight_mode_likely_configuration.py

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -21,6 +21,9 @@ class SuperBotFightModeLikelyConfiguration(BaseModel):
2121
Enterprise customers.
2222
"""
2323

24+
cf_robots_variant: Optional[Literal["off", "policy_only"]] = None
25+
"""Specifies the Robots Access Control License variant to use."""
26+
2427
crawler_protection: Optional[Literal["enabled", "disabled"]] = None
2528
"""Enable rule to punish AI Scrapers and Crawlers via a link maze."""
2629

0 commit comments

Comments (0)