

Python Log.error Method Code Examples

This article collects typical usage examples of the Python method remotespark.utils.log.Log.error. If you are unsure what exactly Log.error does, how to call it, or what real-world usage looks like, the curated examples below should help. You can also explore the other usage examples of remotespark.utils.log.Log, the class this method belongs to.


The following presents 8 code examples of the Log.error method, ordered by popularity.
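
All eight examples share the same pattern: construct a Log instance named after the component, then report failures through error. The sketch below distills that pattern; the constructor signature (a single component-name string) is inferred from the examples, and risky_operation is a hypothetical stand-in for real work.

from remotespark.utils.log import Log

logger = Log("MyComponent")  # name the logger after the component, as the examples do

try:
    risky_operation()  # hypothetical function standing in for real work
except Exception as e:
    logger.error("Operation failed with '{}'".format(e))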

Example 1: SparkMagicBase

# Required import: from remotespark.utils.log import Log [as alias]
# Or: from remotespark.utils.log.Log import error [as alias]
class SparkMagicBase(Magics):
    def __init__(self, shell, data=None):
        # You must call the parent constructor
        super(SparkMagicBase, self).__init__(shell)

        self.logger = Log("SparkMagics")
        self.ipython_display = IpythonDisplay()
        self.spark_controller = SparkController(self.ipython_display)

        try:
            should_serialize = conf.serialize()
            if should_serialize:
                self.logger.debug("Serialization enabled.")

                self.magics_home_path = get_magics_home_path()
                path_to_serialize = join_paths(self.magics_home_path, "state.json")

                self.logger.debug("Will serialize to {}.".format(path_to_serialize))

                self.spark_controller = SparkController(self.ipython_display, serialize_path=path_to_serialize)
            else:
                self.logger.debug("Serialization NOT enabled.")
        except KeyError:
            self.logger.error("Could not read env vars for serialization.")

        self.logger.debug("Initialized spark magics.")

    def execute_sqlquery(self, sqlquery, session, output_var, quiet):
        try:
            df = self.spark_controller.run_cell_sql(sqlquery, session)
            if output_var is not None:
                self.shell.user_ns[output_var] = df
            if quiet:
                return None
            else:
                return df
        except DataFrameParseException as e:
            self.ipython_display.send_error(e.out)
            return None

    @staticmethod
    def print_endpoint_info(info_sessions):
        sessions_info = ["        {}".format(i) for i in info_sessions]
        print("""Info for endpoint:
    Sessions:
{}
""".format("\n".join(sessions_info)))
Developer: gdtm86, Project: sparkmagic, Lines: 49, Source file: sparkmagicsbase.py
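
In practice this base class is not registered directly; a concrete subclass (such as Example 8's RemoteSparkMagics) is exposed to IPython with the standard registration call. A minimal sketch, assuming an interactive IPython session:

from IPython import get_ipython

ip = get_ipython()
ip.register_magics(RemoteSparkMagics)  # IPython instantiates the class with the running shell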

Example 2: ReliableHttpClient

# Required import: from remotespark.utils.log import Log [as alias]
# Or: from remotespark.utils.log.Log import error [as alias]
class ReliableHttpClient(object):
    """Http client that is reliable in its requests. Uses requests library."""

    def __init__(self, endpoint, headers, retry_policy):
        self._endpoint = endpoint
        self._headers = headers
        self._retry_policy = retry_policy
        self.logger = Log("ReliableHttpClient")

        self.verify_ssl = not conf.ignore_ssl_errors()
        if not self.verify_ssl:
            self.logger.debug("ATTENTION: Will ignore SSL errors. This might render you vulnerable to attacks.")
            requests.packages.urllib3.disable_warnings()

    def compose_url(self, relative_url):
        r_u = "/{}".format(relative_url.rstrip("/").lstrip("/"))
        return self._endpoint.url + r_u

    def get(self, relative_url, accepted_status_codes):
        """Sends a get request. Returns a response."""
        return self._send_request(relative_url, accepted_status_codes, requests.get)

    def post(self, relative_url, accepted_status_codes, data):
        """Sends a post request. Returns a response."""
        return self._send_request(relative_url, accepted_status_codes, requests.post, data)

    def delete(self, relative_url, accepted_status_codes):
        """Sends a delete request. Returns a response."""
        return self._send_request(relative_url, accepted_status_codes, requests.delete)

    def _send_request(self, relative_url, accepted_status_codes, function, data=None):
        return self._send_request_helper(self.compose_url(relative_url), accepted_status_codes, function, data, 0)

    def _send_request_helper(self, url, accepted_status_codes, function, data, retry_count):
        while True:
            try:
                if not self._endpoint.authenticate:
                    if data is None:
                        r = function(url, headers=self._headers, verify=self.verify_ssl)
                    else:
                        r = function(url, headers=self._headers, data=json.dumps(data), verify=self.verify_ssl)
                else:
                    if data is None:
                        r = function(url, headers=self._headers, auth=(self._endpoint.username, self._endpoint.password),
                                     verify=self.verify_ssl)
                    else:
                        r = function(url, headers=self._headers, auth=(self._endpoint.username, self._endpoint.password),
                                     data=json.dumps(data), verify=self.verify_ssl)
            except requests.exceptions.RequestException as e:
                error = True
                r = None
                status = None

                self.logger.error("Request to '{}' failed with '{}'".format(url, e))
            else:
                error = False
                status = r.status_code

            if error or status not in accepted_status_codes:
                if self._retry_policy.should_retry(status, error, retry_count):
                    sleep(self._retry_policy.seconds_to_sleep(retry_count))
                    retry_count += 1
                    continue
                else:
                    raise HttpClientException("Invalid status code '{}' or error '{}' from {}"
                                              .format(status, error, url))
            return r
Developer: mindis, Project: sparkmagic, Lines: 69, Source file: reliablehttpclient.py
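
The client above delegates all back-off decisions to the retry_policy object passed to its constructor, calling only should_retry(status, error, retry_count) and seconds_to_sleep(retry_count) on it. The class below is a hypothetical minimal policy satisfying that interface, not the project's own implementation: it retries transport errors and 5xx responses with a fixed sleep, up to a retry budget.

class FixedIntervalRetryPolicy(object):
    """Hypothetical policy: retry up to max_retries times with a fixed sleep between attempts."""

    def __init__(self, seconds=5, max_retries=3):
        self._seconds = seconds
        self._max_retries = max_retries

    def should_retry(self, status_code, error, retry_count):
        # Retry transport errors (status_code is None) and 5xx responses until the budget is spent.
        if retry_count >= self._max_retries:
            return False
        return error or (status_code is not None and status_code >= 500)

    def seconds_to_sleep(self, retry_count):
        return self._seconds

Constructed as ReliableHttpClient(endpoint, headers, FixedIntervalRetryPolicy()), the client would then retry each failing request at most three times before raising HttpClientException.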

Example 3: ClientManagerStateSerializer

# Required import: from remotespark.utils.log import Log [as alias]
# Or: from remotespark.utils.log.Log import error [as alias]
class ClientManagerStateSerializer(object):
    """Livy client manager state serializer"""

    def __init__(self, reader_writer):
        assert reader_writer is not None

        self.logger = Log("ClientManagerStateSerializer")
        self._ipython_display = IpythonDisplay()

        self._reader_writer = reader_writer

    def deserialize_state(self):
        self.logger.debug("Deserializing state.")

        clients_to_return = []

        lines = self._reader_writer.read_lines()
        line = ''.join(lines).strip()

        if line != '':
            self.logger.debug("Read content. Converting to JSON.")
            json_str = json.loads(line)
            clients = json_str["clients"]

            for client in clients:
                # Ignore version for now
                name = client["name"]
                session_id = client["id"]
                sql_context_created = client["sqlcontext"]
                kind = client["kind"].lower()
                connection_string = client["connectionstring"]

                session = self._create_livy_session(connection_string, {"kind": kind}, self._ipython_display,
                                                    session_id, sql_context_created)

                # Do not start the session automatically. Just create it, but skip it if it no longer exists.
                try:
                    # Get status to know if it's alive or not.
                    status = session.status
                    if not session.is_final_status(status):
                        self.logger.debug("Adding session {}".format(session_id))
                        client_obj = self._create_livy_client(session)
                        clients_to_return.append((name, client_obj))
                    else:
                        self.logger.error("Skipping serialized session '{}' because session was in status {}."
                                          .format(session.id, status))
                except (ValueError, ConnectionError) as e:
                    self.logger.error("Skipping serialized session '{}' because {}".format(session.id, str(e)))
        else:
            self.logger.debug("Empty manager state found.")

        return clients_to_return

    def serialize_state(self, name_client_dictionary):
        self.logger.debug("Serializing state.")

        serialized_clients = []
        for name in list(name_client_dictionary.keys()):
            client = name_client_dictionary[name]
            serialized_client = client.serialize()
            serialized_client["name"] = name
            serialized_clients.append(serialized_client)

        serialized_str = json.dumps({"clients": serialized_clients})
        self._reader_writer.overwrite_with_line(serialized_str)

    def _create_livy_session(self, connection_string, properties, ipython_display,
                             session_id, sql_context_created):
        return LivySession.from_connection_string(connection_string, properties, ipython_display,
                                                  session_id, sql_context_created)

    def _create_livy_client(self, session):
        return LivyClient(session)
Developer: gdtm86, Project: sparkmagic, Lines: 75, Source file: clientmanagerstateserializer.py
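
deserialize_state expects the reader_writer to hold a single JSON line with a top-level "clients" list, each entry carrying the fields read above (a version field, if serialized, is currently ignored). The snippet below builds such a line; all values are illustrative placeholders, and the connection string format follows the %spark docstring in Example 8.

import json

state = {"clients": [{
    "name": "testsession",        # key under which the client is restored
    "id": 0,                      # Livy session id
    "sqlcontext": True,           # whether a SQL context was already created
    "kind": "pyspark",            # session kind; lower-cased on read
    "connectionstring": "url=https://sparkcluster.net/livy;username=u;password=p",
}]}
line = json.dumps(state)  # the single line that overwrite_with_line() persists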

Example 4: SparkKernelBase

# Required import: from remotespark.utils.log import Log [as alias]
# Or: from remotespark.utils.log.Log import error [as alias]
class SparkKernelBase(IPythonKernel):
    def __init__(self, implementation, implementation_version, language, language_version, language_info,
                 session_language, user_code_parser=None, **kwargs):
        # Required by Jupyter - Override
        self.implementation = implementation
        self.implementation_version = implementation_version
        self.language = language
        self.language_version = language_version
        self.language_info = language_info

        # Override
        self.session_language = session_language

        super(SparkKernelBase, self).__init__(**kwargs)

        self.logger = Log("_jupyter_kernel".format(self.session_language))
        self._fatal_error = None
        self.ipython_display = IpythonDisplay()

        if user_code_parser is None:
            self.user_code_parser = UserCodeParser()
        else:
            self.user_code_parser = user_code_parser

        # Disable warnings for test env in HDI
        requests.packages.urllib3.disable_warnings()

        if not kwargs.get("testing", False):
            self._load_magics_extension()
            self._change_language()
            if conf.use_auto_viz():
                self._register_auto_viz()

    def do_execute(self, code, silent, store_history=True, user_expressions=None, allow_stdin=False):
        def f(self):
            if self._fatal_error is not None:
                return self._repeat_fatal_error()

            return self._do_execute(code, silent, store_history, user_expressions, allow_stdin)
        return wrap_unexpected_exceptions(f, self._complete_cell)(self)

    def do_shutdown(self, restart):
        # Cleanup
        self._delete_session()

        return self._do_shutdown_ipykernel(restart)

    def _do_execute(self, code, silent, store_history, user_expressions, allow_stdin):
        code_to_run = self.user_code_parser.get_code_to_run(code)

        res = self._execute_cell(code_to_run, silent, store_history, user_expressions, allow_stdin)

        return res

    def _load_magics_extension(self):
        register_magics_code = "%load_ext remotespark.kernels"
        self._execute_cell(register_magics_code, True, False, shutdown_if_error=True,
                           log_if_error="Failed to load the Spark kernels magics library.")
        self.logger.debug("Loaded magics.")

    def _change_language(self):
        register_magics_code = "%%_do_not_call_change_language -l {}\n ".format(self.session_language)
        self._execute_cell(register_magics_code, True, False, shutdown_if_error=True,
                           log_if_error="Failed to change language to {}.".format(self.session_language))
        self.logger.debug("Changed language.")

    def _register_auto_viz(self):
        register_auto_viz_code = """from remotespark.datawidgets.utils import display_dataframe
ip = get_ipython()
ip.display_formatter.ipython_display_formatter.for_type_by_name('pandas.core.frame', 'DataFrame', display_dataframe)"""
        self._execute_cell(register_auto_viz_code, True, False, shutdown_if_error=True,
                           log_if_error="Failed to register auto viz for notebook.")
        self.logger.debug("Registered auto viz.")

    def _delete_session(self):
        code = "%%_do_not_call_delete_session\n "
        self._execute_cell_for_user(code, True, False)

    def _execute_cell(self, code, silent, store_history=True, user_expressions=None, allow_stdin=False,
                      shutdown_if_error=False, log_if_error=None):
        reply_content = self._execute_cell_for_user(code, silent, store_history, user_expressions, allow_stdin)

        if shutdown_if_error and reply_content[u"status"] == u"error":
            error_from_reply = reply_content[u"evalue"]
            if log_if_error is not None:
                message = "{}\nException details:\n\t\"{}\"".format(log_if_error, error_from_reply)
                return self._abort_with_fatal_error(message)

        return reply_content

    def _execute_cell_for_user(self, code, silent, store_history=True, user_expressions=None, allow_stdin=False):
        return super(SparkKernelBase, self).do_execute(code, silent, store_history, user_expressions, allow_stdin)

    def _do_shutdown_ipykernel(self, restart):
        return super(SparkKernelBase, self).do_shutdown(restart)

    def _complete_cell(self):
        """A method that runs a cell with no effect. Call this and return the value it
        returns when there's some sort of error preventing the user's cell from executing; this
        will register the cell from the Jupyter UI as being completed."""
#......... part of the code omitted here .........
Developer: mindis, Project: sparkmagic, Lines: 103, Source file: sparkkernelbase.py
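
The listing is cut off before the body of _complete_cell. A sketch consistent with the docstring, though not necessarily the project's actual code, simply pushes a harmless no-op cell through the normal execution path so Jupyter receives an ordinary completion reply:

    def _complete_cell(self):
        # Execute a no-op; the reply content marks the cell as completed in the UI.
        return self._execute_cell("None", False, True, None, False)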

Example 5: SparkKernelBase

# Required import: from remotespark.utils.log import Log [as alias]
# Or: from remotespark.utils.log.Log import error [as alias]
class SparkKernelBase(IPythonKernel):
    def __init__(self, implementation, implementation_version, language, language_version, language_info,
                 kernel_conf_name, session_language, client_name, **kwargs):
        # Required by Jupyter - Override
        self.implementation = implementation
        self.implementation_version = implementation_version
        self.language = language
        self.language_version = language_version
        self.language_info = language_info

        # Override
        self.kernel_conf_name = kernel_conf_name
        self.session_language = session_language
        self.client_name = client_name

        super(SparkKernelBase, self).__init__(**kwargs)

        self._logger = Log(self.client_name)
        self._session_started = False
        self._fatal_error = None
        self._ipython_display = IpythonDisplay()

        self.user_command_parser = UserCommandParser()

        # Disable warnings for test env in HDI
        requests.packages.urllib3.disable_warnings()

        if not kwargs.get("testing", False):
            configuration = self._get_configuration()
            if not configuration:
                # _get_configuration() sets the error for us so we can just return now.
                # The kernel is not in a good state and all do_execute calls will
                # fail with the fatal error.
                return
            (username, password, url) = configuration
            self.connection_string = get_connection_string(url, username, password)
            self._load_magics_extension()
            if conf.use_auto_viz():
                self._register_auto_viz()

    def do_execute(self, code, silent, store_history=True, user_expressions=None, allow_stdin=False):
        if self._fatal_error is not None:
            self._repeat_fatal_error()

        # Parse command
        subcommand, force, output_var, command = self.user_command_parser.parse_user_command(code)

        # Get transformer
        transformer = self._get_code_transformer(subcommand)

        # Get instructions
        try:
            code_to_run, error_to_show, begin_action, end_action, deletes_session = \
                transformer.get_code_to_execute(self._session_started, self.connection_string,
                                                force, output_var, command)
        except SyntaxError as se:
            self._show_user_error("{}".format(se))
        else:
            # Execute instructions
            if error_to_show is not None:
                self._show_user_error(error_to_show)
                return self._execute_cell(code_to_run, silent, store_history, user_expressions, allow_stdin)

            if begin_action == Constants.delete_session_action:
                self._delete_session()
            elif begin_action == Constants.start_session_action:
                self._start_session()
            elif begin_action == Constants.do_nothing_action:
                pass
            else:
                raise ValueError("Begin action {} not supported.".format(begin_action))

            res = self._execute_cell(code_to_run, silent, store_history, user_expressions, allow_stdin)

            if end_action == Constants.delete_session_action:
                self._delete_session()
            elif end_action == Constants.start_session_action:
                self._start_session()
            elif end_action == Constants.do_nothing_action:
                pass
            else:
                raise ValueError("End action {} not supported.".format(end_action))

            if deletes_session:
                self._session_started = False

            return res

        return self._execute_cell("", silent, store_history, user_expressions, allow_stdin)

    def do_shutdown(self, restart):
        # Cleanup
        self._delete_session()

        return self._do_shutdown_ipykernel(restart)

    @staticmethod
    def _get_code_transformer(subcommand):
        if subcommand == UserCommandParser.run_command:
            return SparkTransformer(subcommand)
#......... part of the code omitted here .........
Developer: MohamedElKamhawy, Project: sparkmagic, Lines: 103, Source file: sparkkernelbase.py

Example 6: LivySession

# Required import: from remotespark.utils.log import Log [as alias]
# Or: from remotespark.utils.log.Log import error [as alias]

#......... part of the code omitted here .........

        if create_sql_context:
            self.create_sql_context()
        self._spark_events.emit_session_creation_end_event(self.guid, self.kind, self.id, self.status)

    def create_sql_context(self):
        """Create a sqlContext object on the session. Object will be accessible via variable 'sqlContext'."""
        if self.created_sql_context:
            return
        self.logger.debug("Starting '{}' hive session.".format(self.kind))
        self.ipython_display.writeln("Creating HiveContext as 'sqlContext'")
        command = self._get_sql_context_creation_command()
        try:
            command.execute(self)
        except LivyClientTimeoutError:
            raise LivyClientTimeoutError("Failed to create the SqlContext in time. Timed out after {} seconds."
                                         .format(self._wait_for_idle_timeout_seconds))
        self.created_sql_context = True

    def get_logs(self):
        log_array = self._http_client.get_all_session_logs(self.id)['log']
        self._logs = "\n".join(log_array)
        return self._logs

    @property
    def http_client(self):
        return self._http_client

    @staticmethod
    def is_final_status(status):
        return status in constants.FINAL_STATUS

    def delete(self):
        self.logger.debug("Deleting session '{}'".format(self.id))

        if self.status != constants.NOT_STARTED_SESSION_STATUS and self.status != constants.DEAD_SESSION_STATUS:
            self._http_client.delete_session(self.id)
            self.status = constants.DEAD_SESSION_STATUS
            self.id = -1
        else:
            raise ValueError("Cannot delete session {} that is in state '{}'."
                             .format(self.id, self.status))

    def wait_for_idle(self, seconds_to_wait=None):
        """Wait for session to go to idle status. Sleep meanwhile. Calls done every status_sleep_seconds as
        indicated by the constructor.

        Parameters:
            seconds_to_wait : number of seconds to wait before giving up.
        """
        if seconds_to_wait is None:
            seconds_to_wait = self._wait_for_idle_timeout_seconds

        while True:
            self._refresh_status()
            if self.status == constants.IDLE_SESSION_STATUS:
                return

            if self.status in constants.FINAL_STATUS:
                error = "Session {} unexpectedly reached final status '{}'. See logs:\n{}"\
                    .format(self.id, self.status, self.get_logs())
                self.logger.error(error)
                raise LivyUnexpectedStatusError(error)

            if seconds_to_wait <= 0.0:
                error = "Session {} did not reach idle status in time. Current status is {}."\
                    .format(self.id, self.status)
                self.logger.error(error)
                raise LivyClientTimeoutError(error)

            start_time = time()
            self.logger.debug("Session {} in state {}. Sleeping {} seconds."
                              .format(self.id, self.status, self._status_sleep_seconds))
            sleep(self._status_sleep_seconds)
            seconds_to_wait -= time() - start_time

    def sleep(self):
        sleep(self._statement_sleep_seconds)

    def _refresh_status(self):
        status = self._http_client.get_session(self.id)['state']

        if status in constants.POSSIBLE_SESSION_STATUS:
            self.status = status
        else:
            raise ValueError("Status '{}' not supported by session.".format(status))

        return self.status

    def _get_sql_context_creation_command(self):
        if self.kind == constants.SESSION_KIND_SPARK:
            sql_context_command = "val sqlContext = new org.apache.spark.sql.hive.HiveContext(sc)"
        elif self.kind == constants.SESSION_KIND_PYSPARK:
            sql_context_command = "from pyspark.sql import HiveContext\nsqlContext = HiveContext(sc)"
        elif self.kind == constants.SESSION_KIND_SPARKR:
            sql_context_command = "sqlContext <- sparkRHive.init(sc)"
        else:
            raise ValueError("Do not know how to create HiveContext in session of kind {}.".format(self.kind))

        return Command(sql_context_command)
Developer: cfregly, Project: sparkmagic, Lines: 104, Source file: livysession.py
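
Callers typically wrap session startup in the two errors wait_for_idle can raise. A sketch follows; session is an already-constructed LivySession, and the import paths of LivyClientTimeoutError and LivyUnexpectedStatusError are not shown in the truncated listing above, so they are assumed to be importable from the same package:

try:
    session.wait_for_idle(seconds_to_wait=120)  # give the cluster two minutes to reach idle
except LivyClientTimeoutError:
    # Not idle within the budget, but not in a final status either, so deletion is allowed.
    session.delete()
    raise
except LivyUnexpectedStatusError:
    # The session reached a final status such as 'dead'; its logs are already in the message.
    raise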

Example 7: LivySession

# Required import: from remotespark.utils.log import Log [as alias]
# Or: from remotespark.utils.log.Log import error [as alias]

#......... part of the code omitted here .........
    def execute(self, commands):
        code = textwrap.dedent(commands)

        data = {"code": code}
        r = self._http_client.post(self._statements_url(), [201], data)
        statement_id = r.json()['id']
        
        return self._get_statement_output(statement_id)

    def delete(self):
        self.logger.debug("Deleting session '{}'".format(self.id))

        if self._status != Constants.not_started_session_status and self._status != Constants.dead_session_status:
            self._http_client.delete("/sessions/{}".format(self.id), [200, 404])
            self._status = Constants.dead_session_status
            self._state.session_id = "-1"
        else:
            raise ValueError("Cannot delete session {} that is in state '{}'."
                             .format(self.id, self._status))

    def wait_for_idle(self, seconds_to_wait):
        """Wait for session to go to idle status. Sleep meanwhile. Calls done every status_sleep_seconds as
        indicated by the constructor.

        Parameters:
            seconds_to_wait : number of seconds to wait before giving up.
        """
        self._refresh_status()
        current_status = self._status
        if current_status == Constants.idle_session_status:
            return

        if current_status in Constants.final_status:
            error = "Session {} unexpectedly reached final status {}. See logs:\n{}"\
                .format(self.id, current_status, self.logs)
            self.logger.error(error)
            raise LivyUnexpectedStatusError(error)

        if seconds_to_wait <= 0.0:
            error = "Session {} did not reach idle status in time. Current status is {}."\
                .format(self.id, current_status)
            self.logger.error(error)
            raise LivyClientTimeoutError(error)

        start_time = time()
        self.logger.debug("Session {} in state {}. Sleeping {} seconds."
                          .format(self.id, current_status, seconds_to_wait))
        sleep(self._status_sleep_seconds)
        elapsed = (time() - start_time)
        return self.wait_for_idle(seconds_to_wait - elapsed)

    def _statements_url(self):
        return "/sessions/{}/statements".format(self.id)

    def _refresh_status(self):
        status = self._get_latest_status()

        if status in Constants.possible_session_status:
            self._status = status
        else:
            raise ValueError("Status '{}' not supported by session.".format(status))

        return self._status

    def _refresh_logs(self):
        self._logs = self._get_latest_logs()
Developer: MohamedElKamhawy, Project: sparkmagic, Lines: 70, Source file: livysession.py

Example 8: RemoteSparkMagics

# Required import: from remotespark.utils.log import Log [as alias]
# Or: from remotespark.utils.log.Log import error [as alias]
class RemoteSparkMagics(Magics):
    def __init__(self, shell, data=None):
        # You must call the parent constructor
        super(RemoteSparkMagics, self).__init__(shell)

        self.logger = Log("RemoteSparkMagics")
        self.ipython_display = IpythonDisplay()
        self.spark_controller = SparkController(self.ipython_display)

        try:
            should_serialize = conf.serialize()
            if should_serialize:
                self.logger.debug("Serialization enabled.")

                self.magics_home_path = get_magics_home_path()
                path_to_serialize = join_paths(self.magics_home_path, "state.json")

                self.logger.debug("Will serialize to {}.".format(path_to_serialize))

                self.spark_controller = SparkController(self.ipython_display, serialize_path=path_to_serialize)
            else:
                self.logger.debug("Serialization NOT enabled.")
        except KeyError:
            self.logger.error("Could not read env vars for serialization.")

        self.logger.debug("Initialized spark magics.")

    @magic_arguments()
    @argument("-c", "--context", type=str, default=Constants.context_name_spark,
              help="Context to use: '{}' for spark, '{}' for sql queries, and '{}' for hive queries. "
                   "Default is '{}'.".format(Constants.context_name_spark,
                                             Constants.context_name_sql,
                                             Constants.context_name_hive,
                                             Constants.context_name_spark))
    @argument("-s", "--session", help="The name of the Livy session to use. "
                                      "If only one session has been created, there's no need to specify one.")
    @argument("-o", "--output", type=str, default=None, help="If present, output when using SQL or Hive "
                                                             "query will be stored in variable of this name.")
    @argument("command", type=str, default=[""], nargs="*", help="Commands to execute.")
    @needs_local_scope
    @line_cell_magic
    def spark(self, line, cell="", local_ns=None):
        """Magic to execute spark remotely.

           This magic allows you to create a Livy Scala or Python session against a Livy endpoint. Every session can
           be used to execute either Spark code or SparkSQL code by executing against the SQL context in the session.
           When the SQL context is used, the result will be a Pandas dataframe of a sample of the results.

           If invoked with no subcommand, the cell will be executed against the specified session.

           Subcommands
           -----------
           info
               Display the available Livy sessions and other configurations for sessions.
           add
               Add a Livy session. First argument is the name of the session, second argument
               is the language, and third argument is the connection string of the Livy endpoint.
               A fourth argument specifying if session creation can be skipped if it already exists is optional:
               "skip" or empty.
               e.g. `%%spark add test python url=https://sparkcluster.net/livy;username=u;password=p skip`
               or
               e.g. `%%spark add test python url=https://sparkcluster.net/livy;username=u;password=p`
           config
               Override the livy session properties sent to Livy on session creation. All session creations will
               contain these config settings from then on.
               Expected value is a JSON key-value string to be sent as part of the Request Body for the POST /sessions
               endpoint in Livy.
               e.g. `%%spark config {"driverMemory":"1000M", "executorCores":4}`
           run
               Run Spark code against a session.
               e.g. `%%spark -s testsession` will execute the cell code against the testsession previously created
               e.g. `%%spark -s testsession -c sql` will execute the SQL code against the testsession previously created
               e.g. `%%spark -s testsession -c sql -o my_var` will execute the SQL code against the testsession
                        previously created and store the pandas dataframe created in the my_var variable in the
                        Python environment.
           logs
               Returns the logs for a given session.
               e.g. `%%spark logs -s testsession` will return the logs for the testsession previously created
           delete
               Delete a Livy session. Argument is the name of the session to be deleted.
               e.g. `%%spark delete defaultlivy`
           cleanup
               Delete all Livy sessions created by the notebook. No arguments required.
               e.g. `%%spark cleanup`
        """
        usage = "Please look at usage of %spark by executing `%spark?`."
        user_input = line
        args = parse_argstring(self.spark, user_input)

        subcommand = args.command[0].lower()

        try:
            # info
            if subcommand == "info":
                if len(args.command) == 2:
                    connection_string = args.command[1]
                    info_sessions = self.spark_controller.get_all_sessions_endpoint_info(connection_string)
                    self._print_endpoint_info(info_sessions)
                elif len(args.command) == 1:
                    self._print_local_info()
#......... part of the code omitted here .........
Developer: MohamedElKamhawy, Project: sparkmagic, Lines: 103, Source file: remotesparkmagics.py
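
In a notebook, the magic defined above is driven exactly as its docstring describes. The lines below sketch a typical session lifecycle; the endpoint, credentials, and table name are placeholders:

%spark add testsession python url=https://sparkcluster.net/livy;username=u;password=p skip
%spark info

%%spark -s testsession -c sql -o my_var
SELECT * FROM my_table

%spark cleanup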

