Join implicit concatenated strings when they fit on a line #13663

Draft · MichaReiser wants to merge 7 commits into main from micha/format-implicit-concatenated-strings

Conversation

MichaReiser (Member) commented on Oct 7, 2024

Summary

Implements #9457 for f-strings, regular strings, and byte literals. The implementation doesn't handle raw strings or triple-quoted strings, mirroring Black's string-processing preview style.
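
For illustration, a minimal before/after sketch of the joining behavior (a constructed example, not taken from the PR):

    # Before: an implicit concatenation left over from an earlier line split
    raise ValueError(
        "this message fits on one line " "once the parts are joined"
    )

    # After (preview): the adjacent literals are merged into a single string
    raise ValueError("this message fits on one line once the parts are joined")

    # Unchanged: raw strings and triple-quoted strings are not joined
    pattern = r"\d+ " r"items"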

Test Plan

MichaReiser added the formatter (Related to the formatter) and preview (Related to preview mode features) labels on Oct 7, 2024
github-actions bot (Contributor) commented on Oct 7, 2024

ruff-ecosystem results

Formatter (stable)

ℹ️ ecosystem check detected format changes. (+1056 -1172 lines in 304 files in 20 projects; 34 projects unchanged)

RasaHQ/rasa (+51 -65 lines across 28 files)

rasa/cli/utils.py~L278

     # Check if a valid setting for `max_history` was given
     if isinstance(max_history, int) and max_history < 1:
         raise argparse.ArgumentTypeError(
-            f"The value of `--max-history {max_history}` " f"is not a positive integer."
+            f"The value of `--max-history {max_history}` is not a positive integer."
         )
 
     return validator.verify_story_structure(

rasa/cli/x.py~L165

         attempts -= 1
 
     rasa.shared.utils.cli.print_error_and_exit(
-        "Could not fetch runtime config from server at '{}'. " "Exiting.".format(
+        "Could not fetch runtime config from server at '{}'. Exiting.".format(
             config_endpoint
         )
     )

rasa/core/actions/action.py~L322

         if message is None:
             if not self.silent_fail:
                 logger.error(
-                    "Couldn't create message for response '{}'." "".format(
+                    "Couldn't create message for response '{}'.".format(
                         self.utter_action
                     )
                 )

rasa/core/actions/action.py~L470

         else:
             if not self.silent_fail:
                 logger.error(
-                    "Couldn't create message for response action '{}'." "".format(
+                    "Couldn't create message for response action '{}'.".format(
                         self.action_name
                     )
                 )

rasa/core/channels/console.py~L194

     exit_text = INTENT_MESSAGE_PREFIX + "stop"
 
     rasa.shared.utils.cli.print_success(
-        "Bot loaded. Type a message and press enter " "(use '{}' to exit): ".format(
+        "Bot loaded. Type a message and press enter (use '{}' to exit): ".format(
             exit_text
         )
     )

rasa/core/channels/telegram.py~L97

                     reply_markup.add(KeyboardButton(button["title"]))
         else:
             logger.error(
-                "Trying to send text with buttons for unknown " "button type {}".format(
+                "Trying to send text with buttons for unknown button type {}".format(
                     button_type
                 )
             )

rasa/core/nlg/callback.py~L81

         body = nlg_request_format(utter_action, tracker, output_channel, **kwargs)
 
         logger.debug(
-            "Requesting NLG for {} from {}." "The request body is {}." "".format(
+            "Requesting NLG for {} from {}.The request body is {}.".format(
                 utter_action, self.nlg_endpoint.url, json.dumps(body)
             )
         )

rasa/core/policies/policy.py~L250

         max_training_samples = kwargs.get("max_training_samples")
         if max_training_samples is not None:
             logger.debug(
-                "Limit training data to {} training samples." "".format(
+                "Limit training data to {} training samples.".format(
                     max_training_samples
                 )
             )

rasa/core/policies/ted_policy.py~L837

             # take the last prediction in the sequence
             similarities = outputs["similarities"][:, -1, :]
         else:
-            raise TypeError(
-                "model output for `similarities` " "should be a numpy array"
-            )
+            raise TypeError("model output for `similarities` should be a numpy array")
         if isinstance(outputs["scores"], np.ndarray):
             confidences = outputs["scores"][:, -1, :]
         else:

rasa/core/policies/unexpected_intent_policy.py~L612

         if isinstance(output["similarities"], np.ndarray):
             sequence_similarities = output["similarities"][:, -1, :]
         else:
-            raise TypeError(
-                "model output for `similarities` " "should be a numpy array"
-            )
+            raise TypeError("model output for `similarities` should be a numpy array")
 
         # Check for unlikely intent
         last_user_uttered_event = tracker.get_last_event_for(UserUttered)

rasa/core/test.py~L772

         ):
             story_dump = YAMLStoryWriter().dumps(partial_tracker.as_story().story_steps)
             error_msg = (
-                f"Model predicted a wrong action. Failed Story: " f"\n\n{story_dump}"
+                f"Model predicted a wrong action. Failed Story: \n\n{story_dump}"
             )
             raise WrongPredictionException(error_msg)
     elif prev_action_unlikely_intent:

rasa/core/train.py~L34

             for policy_config in policy_configs:
                 config_name = os.path.splitext(os.path.basename(policy_config))[0]
                 logging.info(
-                    "Starting to train {} round {}/{}" " with {}% exclusion" "".format(
+                    "Starting to train {} round {}/{} with {}% exclusion".format(
                         config_name, current_run, len(exclusion_percentages), percentage
                     )
                 )

rasa/core/training/interactive.py~L723

     # export training data and quit
     questions = questionary.form(
         export_stories=questionary.text(
-            message="Export stories to (if file exists, this "
-            "will append the stories)",
+            message="Export stories to (if file exists, this will append the stories)",
             default=PATHS["stories"],
             validate=io_utils.file_type_validator(
                 rasa.shared.data.YAML_FILE_EXTENSIONS,

rasa/core/training/interactive.py~L738

             default=PATHS["nlu"],
             validate=io_utils.file_type_validator(
                 list(rasa.shared.data.TRAINING_DATA_EXTENSIONS),
-                "Please provide a valid export path for the NLU data, "
-                "e.g. 'nlu.yml'.",
+                "Please provide a valid export path for the NLU data, e.g. 'nlu.yml'.",
             ),
         ),
         export_domain=questionary.text(
-            message="Export domain file to (if file exists, this "
-            "will be overwritten)",
+            message="Export domain file to (if file exists, this will be overwritten)",
             default=PATHS["domain"],
             validate=io_utils.file_type_validator(
                 rasa.shared.data.YAML_FILE_EXTENSIONS,

rasa/core/utils.py~L41

     """
     if use_syslog:
         formatter = logging.Formatter(
-            "%(asctime)s [%(levelname)-5.5s] [%(process)d]" " %(message)s"
+            "%(asctime)s [%(levelname)-5.5s] [%(process)d] %(message)s"
         )
         socktype = SOCK_STREAM if syslog_protocol == TCP_PROTOCOL else SOCK_DGRAM
         syslog_handler = logging.handlers.SysLogHandler(

rasa/core/utils.py~L73

     """
     if hot_idx >= length:
         raise ValueError(
-            "Can't create one hot. Index '{}' is out " "of range (length '{}')".format(
+            "Can't create one hot. Index '{}' is out of range (length '{}')".format(
                 hot_idx, length
             )
         )

rasa/nlu/featurizers/sparse_featurizer/count_vectors_featurizer.py~L166

                 )
             if self.stop_words is not None:
                 logger.warning(
-                    "Analyzer is set to character, "
-                    "provided stop words will be ignored."
+                    "Analyzer is set to character, provided stop words will be ignored."
                 )
             if self.max_ngram == 1:
                 logger.warning(

rasa/server.py~L289

         raise ErrorResponse(
             HTTPStatus.BAD_REQUEST,
             "BadRequest",
-            "Invalid parameter value for 'include_events'. "
-            "Should be one of {}".format(enum_values),
+            "Invalid parameter value for 'include_events'. Should be one of {}".format(
+                enum_values
+            ),
             {"parameter": "include_events", "in": "query"},
         )
 

rasa/shared/core/domain.py~L198

             domain = cls.from_directory(path)
         else:
             raise InvalidDomain(
-                "Failed to load domain specification from '{}'. "
-                "File not found!".format(os.path.abspath(path))
+                "Failed to load domain specification from '{}'. File not found!".format(
+                    os.path.abspath(path)
+                )
             )
 
         return domain

rasa/shared/core/events.py~L1964

 
     def __str__(self) -> Text:
         """Returns text representation of event."""
-        return (
-            "ActionExecutionRejected("
-            "action: {}, policy: {}, confidence: {})"
-            "".format(self.action_name, self.policy, self.confidence)
+        return "ActionExecutionRejected(action: {}, policy: {}, confidence: {})".format(
+            self.action_name, self.policy, self.confidence
         )
 
     def __hash__(self) -> int:

rasa/shared/core/generator.py~L401

 
             if num_active_trackers:
                 logger.debug(
-                    "Starting {} ... (with {} trackers)" "".format(
+                    "Starting {} ... (with {} trackers)".format(
                         phase_name, num_active_trackers
                     )
                 )

rasa/shared/core/generator.py~L517

                     phase = 0
                 else:
                     logger.debug(
-                        "Found {} unused checkpoints " "in current phase." "".format(
+                        "Found {} unused checkpoints in current phase.".format(
                             len(unused_checkpoints)
                         )
                     )
                     logger.debug(
-                        "Found {} active trackers " "for these checkpoints." "".format(
+                        "Found {} active trackers for these checkpoints.".format(
                             num_active_trackers
                         )
                     )

rasa/shared/core/generator.py~L553

                 augmented_trackers, self.config.max_number_of_augmented_trackers
             )
             logger.debug(
-                "Subsampled to {} augmented training trackers." "".format(
+                "Subsampled to {} augmented training trackers.".format(
                     len(augmented_trackers)
                 )
             )

rasa/shared/core/trackers.py~L634

         """
         if not isinstance(dialogue, Dialogue):
             raise ValueError(
-                f"story {dialogue} is not of type Dialogue. "
-                f"Have you deserialized it?"
+                f"story {dialogue} is not of type Dialogue. Have you deserialized it?"
             )
 
         self._reset()

rasa/shared/core/training_data/story_reader/story_reader.py~L83

         )
         if parsed_events is None:
             raise StoryParseError(
-                "Unknown event '{}'. It is Neither an event " "nor an action).".format(
+                "Unknown event '{}'. It is Neither an event nor an action).".format(
                     event_name
                 )
             )

rasa/shared/core/training_data/story_reader/yaml_story_reader.py~L334

 
         if not self.domain:
             logger.debug(
-                "Skipped validating if intent is in domain as domain " "is `None`."
+                "Skipped validating if intent is in domain as domain is `None`."
             )
             return
 

rasa/shared/nlu/training_data/formats/dialogflow.py~L34

 
         if fformat not in {DIALOGFLOW_INTENT, DIALOGFLOW_ENTITIES}:
             raise ValueError(
-                "fformat must be either {}, or {}" "".format(
+                "fformat must be either {}, or {}".format(
                     DIALOGFLOW_INTENT, DIALOGFLOW_ENTITIES
                 )
             )

rasa/utils/common.py~L308

         access_logger.addHandler(file_handler)
     if use_syslog:
         formatter = logging.Formatter(
-            "%(asctime)s [%(levelname)-5.5s] [%(process)d]" " %(message)s"
+            "%(asctime)s [%(levelname)-5.5s] [%(process)d] %(message)s"
         )
         socktype = SOCK_STREAM if syslog_protocol == TCP_PROTOCOL else SOCK_DGRAM
         syslog_handler = logging.handlers.SysLogHandler(

rasa/utils/endpoints.py~L33

         return EndpointConfig.from_dict(content[endpoint_type])
     except FileNotFoundError:
         logger.error(
-            "Failed to read endpoint configuration " "from {}. No such file.".format(
+            "Failed to read endpoint configuration from {}. No such file.".format(
                 os.path.abspath(filename)
             )
         )

tests/engine/recipes/test_default_recipe.py~L98

         (
             "data/test_config/config_pretrained_embeddings_mitie.yml",
             "data/graph_schemas/config_pretrained_embeddings_mitie_train_schema.yml",
-            "data/graph_schemas/"
-            "config_pretrained_embeddings_mitie_predict_schema.yml",
+            "data/graph_schemas/config_pretrained_embeddings_mitie_predict_schema.yml",
             TrainingType.BOTH,
             False,
         ),

tests/graph_components/validators/test_default_recipe_validator.py~L114

     else:
         with pytest.warns(None) as records:
             validator.validate(dummy_importer)
-        assert len(records) == len(warnings), ", ".join(
-            warning.message.args[0] for warning in records
+        assert len(records) == len(warnings), (
+            ", ".join(warning.message.args[0] for warning in records)
         )
         assert [
             re.match(warning.message.args[0], expected_warning)

tests/graph_components/validators/test_default_recipe_validator.py~L780

     if should_warn:
         with pytest.warns(
             UserWarning,
-            match=(f"'{RulePolicy.__name__}' is not " "included in the model's "),
+            match=(f"'{RulePolicy.__name__}' is not included in the model's "),
         ) as records:
             validator.validate(importer)
     else:

tests/graph_components/validators/test_default_recipe_validator.py~L883

     num_duplicates: bool,
     priority: int,
 ):
-    assert (
-        len(policy_types) >= priority + num_duplicates
-    ), f"This tests needs at least {priority+num_duplicates} many types."
+    assert len(policy_types) >= priority + num_duplicates, (
+        f"This tests needs at least {priority+num_duplicates} many types."
+    )
 
     # start with a schema where node i has priority i
     nodes = {

tests/graph_components/validators/test_default_recipe_validator.py~L992

     with pytest.warns(
         UserWarning,
         match=(
-            "Found rule-based training data but no policy "
-            "supporting rule-based data."
+            "Found rule-based training data but no policy supporting rule-based data."
         ),
     ):
         validator.validate(importer)

tests/nlu/featurizers/test_regex_featurizer.py~L44

 
 
 @pytest.mark.parametrize(
-    "sentence, expected_sequence_features, expected_sentence_features,"
-    "labeled_tokens",
+    "sentence, expected_sequence_features, expected_sentence_features,labeled_tokens",
     [
         (
             "hey how are you today",

tests/nlu/featurizers/test_regex_featurizer.py~L219

 
 
 @pytest.mark.parametrize(
-    "sentence, expected_sequence_features, expected_sentence_features, "
-    "labeled_tokens",
+    "sentence, expected_sequence_features, expected_sentence_features, labeled_tokens",
     [
         (
             "lemonade and mapo tofu",

tests/nlu/featurizers/test_regex_featurizer.py~L383

 
 
 @pytest.mark.parametrize(
-    "sentence, expected_sequence_features, expected_sentence_features,"
-    "case_sensitive",
+    "sentence, expected_sequence_features, expected_sentence_features,case_sensitive",
     [
         ("Hey How are you today", [0.0, 0.0, 0.0], [0.0, 0.0, 0.0], True),
         ("Hey How are you today", [0.0, 1.0, 0.0], [0.0, 1.0, 0.0], False),

tests/nlu/featurizers/test_spacy_featurizer.py~L133

         vecs = ftr._features_for_doc(doc)
         vecs_capitalized = ftr._features_for_doc(doc_capitalized)
 
-        assert np.allclose(
-            vecs, vecs_capitalized, atol=1e-5
-        ), "Vectors are unequal for texts '{}' and '{}'".format(
-            e.get(TEXT), e.get(TEXT).capitalize()
+        assert np.allclose(vecs, vecs_capitalized, atol=1e-5), (
+            "Vectors are unequal for texts '{}' and '{}'".format(
+                e.get(TEXT), e.get(TEXT).capitalize()
+            )
         )
 
 

Snowflake-Labs/snowcli (+9 -7 lines across 2 files)

src/snowflake/cli/_plugins/nativeapp/artifacts.py~L248

     def __init__(self, *, project_root: Path, deploy_root: Path):
         # If a relative path ends up here, it's a bug in the app and can lead to other
         # subtle bugs as paths would be resolved relative to the current working directory.
-        assert (
-            project_root.is_absolute()
-        ), f"Project root {project_root} must be an absolute path."
-        assert (
-            deploy_root.is_absolute()
-        ), f"Deploy root {deploy_root} must be an absolute path."
+        assert project_root.is_absolute(), (
+            f"Project root {project_root} must be an absolute path."
+        )
+        assert deploy_root.is_absolute(), (
+            f"Deploy root {deploy_root} must be an absolute path."
+        )
 
         self._project_root: Path = resolve_without_follow(project_root)
         self._deploy_root: Path = resolve_without_follow(deploy_root)

tests_e2e/test_installation.py~L68

     assert "Initialized the new project in" in output
     for file in files_to_check:
         expected_generated_file = project_path / file
-        assert expected_generated_file.exists(), f"[{expected_generated_file}] does not exist. It should be generated from templates directory."
+        assert expected_generated_file.exists(), (
+            f"[{expected_generated_file}] does not exist. It should be generated from templates directory."
+        )
 
 
 @pytest.mark.e2e

aiven/aiven-client (+2 -2 lines across 1 file)

aiven/client/cli.py~L868

                         types = spec["type"]
                         if isinstance(types, str) and types == "null":
                             print(
-                                "  {title}{description}\n" "     => --remove-option {name}".format(
+                                "  {title}{description}\n     => --remove-option {name}".format(
                                     name=name,
                                     title=spec["title"],
                                     description=description,

aiven/client/cli.py~L879

                                 types = [types]
                             type_str = " or ".join(t for t in types if t != "null")
                             print(
-                                "  {title}{description}\n" "     => -c {name}=<{type}>  {default}".format(
+                                "  {title}{description}\n     => -c {name}=<{type}>  {default}".format(
                                     name=name,
                                     type=type_str,
                                     default=default_desc,

aws/aws-sam-cli (+6 -8 lines across 4 files)

samcli/lib/deploy/deployer.py~L513

             The maximum duration in minutes to wait for the deployment to complete.
         """
         sys.stdout.write(
-            "\n{} - Waiting for stack create/update " "to complete\n".format(
-                datetime.now().strftime("%Y-%m-%d %H:%M:%S")
-            )
+            "\n{} - Waiting for stack create/update to complete\n".format(datetime.now().strftime("%Y-%m-%d %H:%M:%S"))
         )
         sys.stdout.flush()
 

samcli/lib/intrinsic_resolver/intrinsic_property_resolver.py~L319

         verify_intrinsic_type_list(
             value_list,
             IntrinsicResolver.FN_JOIN,
-            message="The list of values in {} after the " "delimiter must be a list".format(IntrinsicResolver.FN_JOIN),
+            message="The list of values in {} after the delimiter must be a list".format(IntrinsicResolver.FN_JOIN),
         )
 
         sanitized_value_list = [

samcli/lib/intrinsic_resolver/intrinsic_property_resolver.py~L487

         verify_intrinsic_type_dict(
             top_level_value,
             IntrinsicResolver.FN_FIND_IN_MAP,
-            message="The TopLevelKey is missing in the Mappings dictionary in Fn::FindInMap " "for {}".format(
+            message="The TopLevelKey is missing in the Mappings dictionary in Fn::FindInMap for {}".format(
                 top_level_key
             ),
         )

samcli/lib/intrinsic_resolver/intrinsic_property_resolver.py~L496

         verify_non_null(
             second_level_value,
             IntrinsicResolver.FN_FIND_IN_MAP,
-            message="The SecondLevelKey is missing in the Mappings dictionary in Fn::FindInMap  " "for {}".format(
+            message="The SecondLevelKey is missing in the Mappings dictionary in Fn::FindInMap  for {}".format(
                 second_level_key
             ),
         )

samcli/lib/schemas/schemas_directory_hierarchy_builder.py~L11

 def get_package_hierarchy(schema_name):
     path = "schema"
     if schema_name.startswith("aws.partner-"):
-        path = path + "." "aws.partner"
+        path = path + ".aws.partner"
         tail = schema_name[len("aws.partner-") :]
         path = path + "." + sanitize_name(tail)
         return path.lower()

tests/integration/local/invoke/test_integrations_cli.py~L183

         self.assertEqual(
             process_stdout.decode("utf-8"),
             "",
-            msg="The return statement in the LambdaFunction " "should never return leading to an empty string",
+            msg="The return statement in the LambdaFunction should never return leading to an empty string",
         )
 
     @pytest.mark.flaky(reruns=3)

binary-husky/gpt_academic (+22 -24 lines across 8 files)

crazy_functions/SourceCode_Analyse.py~L28

     sys_prompt_array = []
     report_part_1 = []
 
-    assert (
-        len(file_manifest) <= 512
-    ), "源文件太多(超过512个), 请缩减输入文件的数量。或者,您也可以选择删除此行警告,并修改代码拆分file_manifest列表,从而实现分批次处理。"
+    assert len(file_manifest) <= 512, (
+        "源文件太多(超过512个), 请缩减输入文件的数量。或者,您也可以选择删除此行警告,并修改代码拆分file_manifest列表,从而实现分批次处理。"
+    )
     ############################## <第一步,逐个文件分析,多线程> ##################################
     for index, fp in enumerate(file_manifest):
         # 读取文件

crazy_functions/SourceCode_Comment.py~L35

     history_array = []
     sys_prompt_array = []
 
-    assert (
-        len(file_manifest) <= 512
-    ), "源文件太多(超过512个), 请缩减输入文件的数量。或者,您也可以选择删除此行警告,并修改代码拆分file_manifest列表,从而实现分批次处理。"
+    assert len(file_manifest) <= 512, (
+        "源文件太多(超过512个), 请缩减输入文件的数量。或者,您也可以选择删除此行警告,并修改代码拆分file_manifest列表,从而实现分批次处理。"
+    )
 
     # 建立文件树
     file_tree_struct = FileNode("root", build_manifest=True)

crazy_functions/agent_fns/python_comment_agent.py~L235

         # sanity check (testing/debugging only)
         if 0 and margin:
             for line in text.split("\n"):
-                assert not line or line.startswith(margin), "line = %r, margin = %r" % (
-                    line,
-                    margin,
+                assert not line or line.startswith(margin), (
+                    "line = %r, margin = %r" % (line, margin)
                 )
 
         if margin:

crazy_functions/latex_fns/latex_toolbox.py~L473

             main_file = insert_abstract(main_file)
         match_opt1 = pattern_opt1.search(main_file)
         match_opt2 = pattern_opt2.search(main_file)
-        assert (match_opt1 is not None) or (
-            match_opt2 is not None
-        ), "Cannot find paper abstract section!"
+        assert (match_opt1 is not None) or (match_opt2 is not None), (
+            "Cannot find paper abstract section!"
+        )
     return main_file
 
 

request_llms/bridge_chatglmft.py~L70

                     # with open(conf, 'r', encoding='utf8') as f:
                     #     model_args = json.loads(f.read())
                     CHATGLM_PTUNING_CHECKPOINT = get_conf("CHATGLM_PTUNING_CHECKPOINT")
-                    assert os.path.exists(
-                        CHATGLM_PTUNING_CHECKPOINT
-                    ), "找不到微调模型检查点"
+                    assert os.path.exists(CHATGLM_PTUNING_CHECKPOINT), (
+                        "找不到微调模型检查点"
+                    )
                     conf = os.path.join(CHATGLM_PTUNING_CHECKPOINT, "config.json")
                     with open(conf, "r", encoding="utf8") as f:
                         model_args = json.loads(f.read())

shared_utils/config_loader.py~L116

             )
         else:
             log亮绿("[PROXY] 网络代理状态:已配置。配置信息如下:", str(r))
-            assert isinstance(
-                r, dict
-            ), "proxies格式错误,请注意proxies选项的格式,不要遗漏括号。"
+            assert isinstance(r, dict), (
+                "proxies格式错误,请注意proxies选项的格式,不要遗漏括号。"
+            )
     return r
 
 

tests/test_python_auto_docstring.py~L224

         # sanity check (testing/debugging only)
         if 0 and margin:
             for line in text.split("\n"):
-                assert not line or line.startswith(margin), "line = %r, margin = %r" % (
-                    line,
-                    margin,
+                assert not line or line.startswith(margin), (
+                    "line = %r, margin = %r" % (line, margin)
                 )
 
         if margin:

toolbox.py~L205

     """
     刷新用户界面
     """
-    assert isinstance(
-        chatbot, ChatBotWithCookies
-    ), "在传递chatbot的过程中不要将其丢弃。必要时, 可用clear将其清空, 然后用for+append循环重新赋值。"
+    assert isinstance(chatbot, ChatBotWithCookies), (
+        "在传递chatbot的过程中不要将其丢弃。必要时, 可用clear将其清空, 然后用for+append循环重新赋值。"
+    )
     cookies = chatbot.get_cookies()
     # 备份一份History作为记录
     cookies.update({"history": history})

bokeh/bokeh (+35 -42 lines across 17 files)

examples/basic/layouts/sizing_mode_multiple.py~L42

 heading = Div(
     sizing_mode="stretch_width",
     height=80,
-    text="In this wave example, the sliders on the left " "can be used to change the amplitude, frequency, phase, and offset of the wave.",
+    text="In this wave example, the sliders on the left can be used to change the amplitude, frequency, phase, and offset of the wave.",
 )
 
 layout = column(heading, row(widgets, plot), sizing_mode="stretch_both")

examples/plotting/sprint.py~L100

     y_units="screen",
     text_font_size="11px",
     text_color="silver",
-    text="This chart includes medals for the United States and " 'Australia in the "Intermediary" Games of 1906, which ' "the I.O.C. does not formally recognize.",
+    text='This chart includes medals for the United States and Australia in the "Intermediary" Games of 1906, which the I.O.C. does not formally recognize.',
 )
 plot.add_layout(disclaimer, "below")
 

src/bokeh/application/handlers/document_lifecycle.py~L64

         try:
             callback(session_context)
         except Exception as e:
-            log.warning("DocumentLifeCycleHandler on_session_destroyed " f"callback {callback} failed with following error: {e}")
+            log.warning(f"DocumentLifeCycleHandler on_session_destroyed callback {callback} failed with following error: {e}")
     if callbacks:
         # If any session callbacks were defined garbage collect after deleting all references
         del callback

src/bokeh/command/subcommands/serve.py~L613

             Argument(
                 metavar="ICO_PATH",
                 type=str,
-                help="Path to a .ico file to use as the favicon.ico, or 'none' to "
-                "disable favicon.ico support. If unset, a default Bokeh .ico "
-                "file will be used",
+                help="Path to a .ico file to use as the favicon.ico, or 'none' to disable favicon.ico support. If unset, a default Bokeh .ico file will be used",
                 default=None,
             ),
         ),

src/bokeh/command/subcommands/serve.py~L724

                 action="store",
                 default=None,
                 nargs="+",
-                help="A list of request headers to exclude from the session " "context (by default all headers are included).",
+                help="A list of request headers to exclude from the session context (by default all headers are included).",
             ),
         ),
         (

src/bokeh/command/subcommands/serve.py~L733

                 action="store",
                 default=None,
                 nargs="+",
-                help="A list of request cookies to exclude from the session " "context (by default all cookies are included).",
+                help="A list of request cookies to exclude from the session context (by default all cookies are included).",
             ),
         ),
         (

src/bokeh/command/subcommands/serve.py~L742

                 action="store",
                 default=None,
                 nargs="+",
-                help="A list of request headers to make available in the session " "context (by default all headers are included).",
+                help="A list of request headers to make available in the session context (by default all headers are included).",
             ),
         ),
         (

src/bokeh/command/subcommands/serve.py~L751

                 action="store",
                 default=None,
                 nargs="+",
-                help="A list of request cookies to make available in the session " "context (by default all cookies are included).",
+                help="A list of request cookies to make available in the session context (by default all cookies are included).",
             ),
         ),
         (

src/bokeh/command/subcommands/serve.py~L791

             Argument(
                 metavar="N",
                 action="store",
-                help="Number of worker processes for an app. Using " "0 will autodetect number of cores (defaults to 1)",
+                help="Number of worker processes for an app. Using 0 will autodetect number of cores (defaults to 1)",
                 default=1,
                 type=int,
             ),

src/bokeh/command/subcommands/serve.py~L814

             Argument(
                 metavar="BYTES",
                 action="store",
-                help="Set the Tornado websocket_max_message_size value " "(default: 20MB)",
+                help="Set the Tornado websocket_max_message_size value (default: 20MB)",
                 default=DEFAULT_WEBSOCKET_MAX_MESSAGE_SIZE_BYTES,
                 type=int,
             ),

src/bokeh/document/callbacks.py~L294

 
         if not issubclass(_CONCRETE_EVENT_CLASSES[event], DocumentEvent):
             raise ValueError(
-                "Document.on_event may only be used to subscribe " "to events of type DocumentEvent. To subscribe " "to a ModelEvent use the Model.on_event method."
+                "Document.on_event may only be used to subscribe to events of type DocumentEvent. To subscribe to a ModelEvent use the Model.on_event method."
             )
 
         for callback in callbacks:

src/bokeh/io/export.py~L444

         WebDriverWait(driver, timeout, poll_frequency=0.1).until(is_bokeh_render_complete)
     except TimeoutException:
         log.warning(
-            "The webdriver raised a TimeoutException while waiting for "
-            "a 'bokeh:idle' event to signify that the layout has rendered. "
-            "Something may have gone wrong."
+            "The webdriver raised a TimeoutException while waiting for a 'bokeh:idle' event to signify that the layout has rendered. Something may have gone wrong."
         )
     finally:
         _log_console(driver)

src/bokeh/io/webdriver.py~L21

 
 from ..util.dependencies import import_required  # isort:skip
 
-import_required("selenium.webdriver", "To use bokeh.io image export functions you need selenium " "('conda install selenium' or 'pip install selenium')")
+import_required("selenium.webdriver", "To use bokeh.io image export functions you need selenium ('conda install selenium' or 'pip install selenium')")
 
 # Standard library imports
 import atexit

src/bokeh/io/webdriver.py~L97

                 break
         else:
             raise RuntimeError(
-                "chromedriver or its variant is not installed or not present on PATH; " "use BOKEH_CHROMEDRIVER_PATH to specify a customized chromedriver's location"
+                "chromedriver or its variant is not installed or not present on PATH; use BOKEH_CHROMEDRIVER_PATH to specify a customized chromedriver's location"
             )
 
     service = Service(executable_path)

src/bokeh/models/sources.py~L286

             try:
                 _df.columns = ["_".join(col) for col in _df.columns.values]
             except TypeError:
-                raise TypeError("Could not flatten MultiIndex columns. " "use string column names or flatten manually")
+                raise TypeError("Could not flatten MultiIndex columns. use string column names or flatten manually")
         # Transform columns CategoricalIndex in list
         if isinstance(df.columns, pd.CategoricalIndex):
             _df.columns = df.columns.tolist()

src/bokeh/plotting/_renderer.py~L303

                     pass
                 else:
                     raise RuntimeError(
-                        "Color columns need to be of type uint32[N], uint8[N] or uint8/float[N, {3, 4}]" f" ({var} is {val.dtype}[{', '.join(map(str, val.shape))}]"
+                        f"Color columns need to be of type uint32[N], uint8[N] or uint8/float[N, {{3, 4}}] ({var} is {val.dtype}[{', '.join(map(str, val.shape))}]"
                     )
             elif val.ndim != 1:
                 raise RuntimeError(f"Columns need to be 1D ({var} is not)")

src/bokeh/plotting/graph.py~L78

         node_dict[attr_key] = values
 
     if "index" in node_attr_keys:
-        warn("Converting node attributes labeled 'index' are skipped. " "If you want to convert these attributes, please re-label with other names.")
+        warn("Converting node attributes labeled 'index' are skipped. If you want to convert these attributes, please re-label with other names.")
 
     node_dict["index"] = list(graph.nodes())
 

src/bokeh/plotting/graph.py~L95

         edge_dict[attr_key] = values
 
     if "start" in edge_attr_keys or "end" in edge_attr_keys:
-        warn("Converting edge attributes labeled 'start' or 'end' are skipped. " "If you want to convert these attributes, please re-label them with other names.")
+        warn("Converting edge attributes labeled 'start' or 'end' are skipped. If you want to convert these attributes, please re-label them with other names.")
 
     edge_dict["start"] = [x[0] for x in graph.edges()]
     edge_dict["end"] = [x[1] for x in graph.edges()]

src/bokeh/plotting/graph.py~L111

 
         node_keys = graph_renderer.node_renderer.data_source.data["index"]
         if set(node_keys) != set(layout_function.keys()):
-            warn("Node keys in 'layout_function' don't match node keys in the graph. " "These nodes may not be displayed correctly.")
+            warn("Node keys in 'layout_function' don't match node keys in the graph. These nodes may not be displayed correctly.")
 
     graph_renderer.layout_provider = StaticLayoutProvider(graph_layout=graph_layout)
 

src/bokeh/resources.py~L363

 
         if self.mode not in get_args(BaseMode):
             raise ValueError(
-                "wrong value for 'mode' parameter, expected " f"'inline', 'cdn', 'server(-dev)', 'relative(-dev)' or 'absolute(-dev)', got {mode}",
+                f"wrong value for 'mode' parameter, expected 'inline', 'cdn', 'server(-dev)', 'relative(-dev)' or 'absolute(-dev)', got {mode}",
             )
 
         if root_dir and not self.mode.startswith("relative"):

src/bokeh/resources.py~L646

             RuntimeMessage(
                 type="warn",
                 text=(
-                    f"Requesting CDN BokehJS version '{version}' from local development version '{__version__}'. "
-                    "This configuration is unsupported and may not work!"
+                    f"Requesting CDN BokehJS version '{version}' from local development version '{__version__}'. This configuration is unsupported and may not work!"
                 ),
             )
         )

src/bokeh/server/server.py~L448

 
             if opts.num_procs != 1:
                 assert all(app_context.application.safe_to_fork for app_context in tornado_app.applications.values()), (
-                    "User application code has run before attempting to start " "multiple processes. This is considered an unsafe operation."
+                    "User application code has run before attempting to start multiple processes. This is considered an unsafe operation."
                 )
 
             http_server = HTTPServer(tornado_app, **http_server_kwargs)

src/bokeh/server/tornado.py~L342

             raise ValueError("mem_log_frequency_milliseconds must be >= 0")
         elif mem_log_frequency_milliseconds > 0:
             if psutil is None:
-                log.warning(
-                    "Memory logging requested, but is disabled. Optional dependency 'psutil' is missing. " "Try 'pip install psutil' or 'conda install psutil'"
-                )
+                log.warning("Memory logging requested, but is disabled. Optional dependency 'psutil' is missing. Try 'pip install psutil' or 'conda install psutil'")
                 mem_log_frequency_milliseconds = 0
             elif mem_log_frequency_milliseconds != DEFAULT_MEM_LOG_FREQ_MS:
                 log.info("Log memory usage every %d milliseconds", mem_log_frequency_milliseconds)

src/bokeh/util/token.py~L269

         using_sysrandom = True
         return sysrandom, using_sysrandom
     except NotImplementedError:
-        warn("A secure pseudo-random number generator is not available " "on your system. Falling back to Mersenne Twister.")
+        warn("A secure pseudo-random number generator is not available on your system. Falling back to Mersenne Twister.")
         if settings.secret_key() is None:
             warn(
                 "A secure pseudo-random number generator is not available "

src/typings/bs4.pyi~L11

 
 class ResultSet(list[PageElement]): ...
 
-_Features: TypeAlias = Literal["lxml", "lxml-xml", "html.parser", "html5lib" "html", "html5", "xml"]
+_Features: TypeAlias = Literal["lxml", "lxml-xml", "html.parser", "html5libhtml", "html5", "xml"]
 
 class BeautifulSoup:
     def __init__(self, markup: str | IO[str] = ..., features: _Features | None = ...) -> None: ...
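
This hunk also surfaces a pre-existing bug rather than introducing one: in Python, adjacent string literals concatenate at parse time, so the missing comma between "html5lib" and "html" has always produced a single "html5libhtml" entry. The join only makes it visible:

    # Adjacent string literals merge at parse time, so a missing comma
    # silently fuses two intended entries into one:
    features = ("lxml", "lxml-xml", "html.parser", "html5lib" "html", "html5", "xml")
    assert features[3] == "html5libhtml"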

tests/unit/bokeh/command/subcommands/test_serve.py~L270

             Argument(
                 metavar="ICO_PATH",
                 type=str,
-                help="Path to a .ico file to use as the favicon.ico, or 'none' to "
-                "disable favicon.ico support. If unset, a default Bokeh .ico "
-                "file will be used",
+                help="Path to a .ico file to use as the favicon.ico, or 'none' to disable favicon.ico support. If unset, a default Bokeh .ico file will be used",
                 default=None,
             ),
         ),

tests/unit/bokeh/command/subcommands/test_serve.py~L381

                 action="store",
                 default=None,
                 nargs="+",
-                help="A list of request headers to exclude from the session " "context (by default all headers are included).",
+                help="A list of request headers to exclude from the session context (by default all headers are included).",
             ),
         ),
         (

tests/unit/bokeh/command/subcommands/test_serve.py~L390

                 action="store",
                 default=None,
                 nargs="+",
-                help="A list of request cookies to exclude from the session " "context (by default all cookies are included).",
+                help="A list of request cookies to exclude from the session context (by default all cookies are included).",
             ),
         ),
         (

tests/unit/bokeh/command/subcommands/test_serve.py~L399

                 action="store",
                 default=None,
                 nargs="+",
-                help="A list of request headers to make available in the session " "context (by default all headers are included).",
+                help="A list of request headers to make available in the session context (by default all headers are included).",
             ),
         ),
         (

tests/unit/bokeh/command/subcommands/test_serve.py~L408

                 action="store",
                 default=None,
                 nargs="+",
-                help="A list of request cookies to make available in the session " "context (by default all cookies are included).",
+                help="A list of request cookies to make available in the session context (by default all cookies are included).",
             ),
         ),
         (

tests/unit/bokeh/command/subcommands/test_serve.py~L448

             Argument(
                 metavar="N",
                 action="store",
-                help="Number of worker processes for an app. Using " "0 will autodetect number of cores (defaults to 1)",
+                help="Number of worker processes for an app. Using 0 will autodetect number of cores (defaults to 1)",
                 default=1,
                 type=int,
             ),

tests/unit/bokeh/command/subcommands/test_serve.py~L471

             Argument(
                 metavar="BYTES",
                 action="store",
-                help="Set the Tornado websocket_max_message_size value " "(default: 20MB)",
+                help="Set the Tornado websocket_max_message_size value (default: 20MB)",
                 default=20 * 1024 * 1024,
                 type=int,
             ),

tests/unit/bokeh/core/test_templates.py~L80

     """
     with open(join(TOP_PATH, "_templates/autoload_nb_js.js"), mode="rb") as f:
         current_template_sha256 = compute_sha256(_crlf_cr_2_lf_bin(f.read()))
-        assert pinned_template_sha256 == current_template_sha256, """\
+        assert pinned_template_sha256 == current_template_sha256, (
+            """\
             It seems that the template autoload_nb_js.js has changed.
             If this is voluntary and that proper testing of plots insertion
             in notebooks has been completed successfully, update this test
             with the new file SHA256 signature."""
+        )
 
 
 def test_no_white_space_in_top_of_html() -> None:

docker/docker-py (+41 -50 lines across 21 files)

docker/api/client.py~L240

             return self.version(api_version=False)["ApiVersion"]
         except KeyError as ke:
             raise DockerException(
-                'Invalid response from docker daemon: key "ApiVersion"' " is missing."
+                'Invalid response from docker daemon: key "ApiVersion" is missing.'
             ) from ke
         except Exception as e:
             raise DockerException(

docker/api/container.py~L654

             kwargs = {}
         if "version" in kwargs:
             raise TypeError(
-                "create_host_config() got an unexpected " "keyword argument 'version'"
+                "create_host_config() got an unexpected keyword argument 'version'"
             )
         kwargs["version"] = self._version
         return HostConfig(*args, **kwargs)

docker/api/container.py~L1216

         if stream:
             if one_shot:
                 raise errors.InvalidArgument(
-                    "one_shot is only available in conjunction with " "stream=False"
+                    "one_shot is only available in conjunction with stream=False"
                 )
             return self._stream_helper(
                 self._get(url, stream=True, params=params), decode=decode

docker/api/container.py~L1357

         if restart_policy:
             if utils.version_lt(self._version, "1.23"):
                 raise errors.InvalidVersion(
-                    "restart policy update is not supported " "for API version < 1.23"
+                    "restart policy update is not supported for API version < 1.23"
                 )
             data["RestartPolicy"] = restart_policy
 

docker/api/network.py~L128

         if internal:
             if version_lt(self._version, "1.22"):
                 raise InvalidVersion(
-                    "Internal networks are not " "supported in API version < 1.22"
+                    "Internal networks are not supported in API version < 1.22"
                 )
             data["Internal"] = True
 

docker/api/swarm.py~L169

         if data_path_addr is not None:
             if utils.version_lt(self._version, "1.30"):
                 raise errors.InvalidVersion(
-                    "Data address path is only available for " "API version >= 1.30"
+                    "Data address path is only available for API version >= 1.30"
                 )
             data["DataPathAddr"] = data_path_addr
 
         if data_path_port is not None:
             if utils.version_lt(self._version, "1.40"):
                 raise errors.InvalidVersion(
-                    "Data path port is only available for " "API version >= 1.40"
+                    "Data path port is only available for API version >= 1.40"
                 )
             data["DataPathPort"] = data_path_port
 

docker/api/swarm.py~L264

         if data_path_addr is not None:
             if utils.version_lt(self._version, "1.30"):
                 raise errors.InvalidVersion(
-                    "Data address path is only available for " "API version >= 1.30"
+                    "Data address path is only available for API version >= 1.30"
                 )
             data["DataPathAddr"] = data_path_addr
 

docker/auth.py~L21

     index_name, remote_name = split_repo_name(repo_name)
     if index_name[0] == "-" or index_name[-1] == "-":
         raise errors.InvalidRepository(
-            f"Invalid index name ({index_name}). " "Cannot begin or end with a hyphen."
+            f"Invalid index name ({index_name}). Cannot begin or end with a hyphen."
         )
     return resolve_index_name(index_name), remote_name
 

docker/models/containers.py~L888

 
         if kwargs.get("network") and kwargs.get("network_mode"):
             raise RuntimeError(
-                'The options "network" and "network_mode" can not be used ' "together."
+                'The options "network" and "network_mode" can not be used together.'
             )
 
         if kwargs.get("networking_config") and not kwargs.get("network"):
             raise RuntimeError(
-                'The option "networking_config" can not be used ' 'without "network".'
+                'The option "networking_config" can not be used without "network".'
             )
 
         try:

docker/tls.py~L35

                 tls_cert, tls_key = client_cert
             except ValueError:
                 raise errors.TLSParameterError(
-                    "client_cert must be a tuple of" " (client certificate, key file)"
+                    "client_cert must be a tuple of (client certificate, key file)"
                 ) from None
 
             if not (tls_cert and tls_key) or (

docker/transport/npipeconn.py~L47

             if self.block:
                 raise urllib3.exceptions.EmptyPoolError(
                     self,
-                    "Pool reached maximum size and no more " "connections are allowed.",
+                    "Pool reached maximum size and no more connections are allowed.",
                 ) from None
             # Oh well, we'll create a new connection then
 

docker/transport/sshconn.py~L65

     def _write(self, data):
         if not self.proc or self.proc.stdin.closed:
             raise Exception(
-                "SSH subprocess not initiated." "connect() must be called first."
+                "SSH subprocess not initiated.connect() must be called first."
             )
         written = self.proc.stdin.write(data)
         self.proc.stdin.flush()

docker/transport/sshconn.py~L80

     def recv(self, n):
         if not self.proc:
             raise Exception(
-                "SSH subprocess not initiated." "connect() must be called first."
+                "SSH subprocess not initiated.connect() must be called first."
             )
         return self.proc.stdout.read(n)
 

docker/transport/sshconn.py~L148

             if self.block:
                 raise urllib3.exceptions.EmptyPoolError(
                     self,
-                    "Pool reached maximum size and no more " "connections are allowed.",
+                    "Pool reached maximum size and no more connections are allowed.",
                 ) from None
             # Oh well, we'll create a new connection then
 

docker/types/containers.py~L738

 
             if version_lt(version, "1.29") and "StartPeriod" in healthcheck:
                 raise errors.InvalidVersion(
-                    "healthcheck start period was introduced in API " "version 1.29"
+                    "healthcheck start period was introduced in API version 1.29"
                 )
 
         if isinstance(command, str):

docker/types/services.py~L304

                 self["BindOptions"] = {"Propagation": propagation}
             if any([labels, driver_config, no_copy, tmpfs_size, tmpfs_mode]):
                raise errors.Invali...

[Comment body truncated]

MichaReiser force-pushed the micha/format-implicit-concatenated-strings branch 2 times, most recently from 75afd64 to 0a86ffe on October 15, 2024 09:30
MichaReiser force-pushed the micha/format-implicit-concatenated-strings branch from c0565b9 to c7380d4 on October 16, 2024 08:48
MichaReiser (Member, Author) commented

I like what I'm seeing in the diff. I don't like seeing another instability, ugh
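
An "instability" here means the formatter is not idempotent: formatting already-formatted code changes it again. Roughly, the ecosystem check verifies a property like the sketch below (check_stable is a hypothetical helper for illustration, not ruff's actual API):

    from typing import Callable

    def check_stable(format_code: Callable[[str], str], source: str) -> bool:
        # A formatter is stable (idempotent) if formatting its own output is a no-op.
        once = format_code(source)
        return format_code(once) == once

    # Trivial usage with a toy "formatter" that strips trailing whitespace:
    assert check_stable(lambda s: s.rstrip() + "\n", "x = 1  \n")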
