我正在尝试在 Google Colab 中导入一个 Python 脚本,以便合并到我自己的代码中。我的代码是用 PyTorch 写的。运行代码时我收到了如下错误信息:
from torch.utils.tensorboard import SummaryWriter
71
72 torch.manual_seed(42)
/usr/local/lib/python3.9/dist-packages/torch/utils/tensorboard/__init__.py in <module>
10 del tensorboard
11
---> 12 from .writer import FileWriter, SummaryWriter # noqa: F401
13 from tensorboard.summary.writer.record_writer import RecordWriter # noqa: F401
/usr/local/lib/python3.9/dist-packages/torch/utils/tensorboard/writer.py in <module>
14
15 from ._convert_np import make_np
---> 16 from ._embedding import (
17 make_mat,
18 make_sprite,
/usr/local/lib/python3.9/dist-packages/torch/utils/tensorboard/_embedding.py in <module>
7
8
----> 9 _HAS_GFILE_JOIN = hasattr(tf.io.gfile, "join")
10
11
/usr/local/lib/python3.9/dist-packages/tensorboard/lazy.py in __getattr__(self, attr_name)
63 class LazyModule(types.ModuleType):
64 def __getattr__(self, attr_name):
---> 65 return getattr(load_once(self), attr_name)
66
67 def __dir__(self):
/usr/local/lib/python3.9/dist-packages/tensorboard/lazy.py in wrapper(arg)
/usr/local/lib/python3.9/dist-packages/tensorboard/lazy.py in load_once(self)
48 raise ImportError("Circular import when resolving LazyModule %r" % name)
49 load_once.loading = True
---> 50 try:
51 module = load_fn()
52 finally:
/usr/local/lib/python3.9/dist-packages/tensorboard/compat/__init__.py in tf()
43 Raises:
44 ImportError: if a TF-like API is not available.
---> 45 """
46 try:
47 from tensorboard.compat import notf # pylint: disable=g-import-not-at-top
/usr/local/lib/python3.9/dist-packages/tensorflow/__init__.py in <module>
474 if hasattr(_current_module, "keras"):
475 try:
--> 476 _keras._load()
477 except ImportError:
478 pass
/usr/local/lib/python3.9/dist-packages/tensorflow/python/util/lazy_loader.py in _load(self)
39 """Load the module and insert it into the parent's globals."""
40 # Import the target module and insert it into the parent's namespace
---> 41 module = importlib.import_module(self.__name__)
42 self._parent_module_globals[self._local_name] = module
43
/usr/lib/python3.9/importlib/__init__.py in import_module(name, package)
125 break
126 level += 1
--> 127 return _bootstrap._gcd_import(name[level:], package, level)
128
129
/usr/local/lib/python3.9/dist-packages/keras/__init__.py in <module>
19 """
20 from keras import distribute
---> 21 from keras import models
22 from keras.engine.input_layer import Input
23 from keras.engine.sequential import Sequential
/usr/local/lib/python3.9/dist-packages/keras/models/__init__.py in <module>
16
17
---> 18 from keras.engine.functional import Functional
19 from keras.engine.sequential import Sequential
20 from keras.engine.training import Model
/usr/local/lib/python3.9/dist-packages/keras/engine/functional.py in <module>
32 from keras.engine import input_spec
33 from keras.engine import node as node_module
---> 34 from keras.engine import training as training_lib
35 from keras.engine import training_utils
36 from keras.saving.legacy import serialization
/usr/local/lib/python3.9/dist-packages/keras/engine/training.py in <module>
30 from keras.engine import base_layer
31 from keras.engine import base_layer_utils
---> 32 from keras.engine import compile_utils
33 from keras.engine import data_adapter
34 from keras.engine import input_layer as input_layer_module
/usr/local/lib/python3.9/dist-packages/keras/engine/compile_utils.py in <module>
22
23 from keras import losses as losses_mod
---> 24 from keras import metrics as metrics_mod
25 from keras.saving import saving_lib
26 from keras.utils import generic_utils
/usr/local/lib/python3.9/dist-packages/keras/metrics/__init__.py in <module>
82
83 # Confusion metrics
---> 84 from keras.metrics.confusion_metrics import AUC
85 from keras.metrics.confusion_metrics import FalseNegatives
86 from keras.metrics.confusion_metrics import FalsePositives
/usr/local/lib/python3.9/dist-packages/keras/metrics/confusion_metrics.py in <module>
20 import tensorflow.compat.v2 as tf
21
---> 22 from keras import activations
23 from keras import backend
24 from keras.dtensor import utils as dtensor_utils
/usr/local/lib/python3.9/dist-packages/keras/activations.py in <module>
19 import tensorflow.compat.v2 as tf
20
---> 21 import keras.layers.activation as activation_layers
22 from keras import backend
23 from keras.saving.legacy import serialization as legacy_serialization
/usr/local/lib/python3.9/dist-packages/keras/layers/__init__.py in <module>
18
19 from keras.engine.base_layer import Layer
---> 20 from keras.engine.base_preprocessing_layer import PreprocessingLayer
21
22 # Generic layers.
/usr/local/lib/python3.9/dist-packages/keras/engine/base_preprocessing_layer.py in <module>
19 import tensorflow.compat.v2 as tf
20
---> 21 from keras.engine import data_adapter
22 from keras.engine.base_layer import Layer
23 from keras.utils import version_utils
/usr/local/lib/python3.9/dist-packages/keras/engine/data_adapter.py in <module>
42
43 try:
---> 44 import pandas as pd
45 except ImportError:
46 pd = None
/usr/local/lib/python3.9/dist-packages/pandas/__init__.py in <module>
20
21 # numpy compat
---> 22 from pandas.compat import is_numpy_dev as _is_numpy_dev # pyright: ignore # noqa:F401
23
24 try:
/usr/local/lib/python3.9/dist-packages/pandas/compat/__init__.py in <module>
20 np_version_under1p21,
21 )
---> 22 from pandas.compat.pyarrow import (
23 pa_version_under1p01,
24 pa_version_under2p0,
/usr/local/lib/python3.9/dist-packages/pandas/compat/pyarrow.py in <module>
6
7 try:
----> 8 import pyarrow as pa
9
10 _pa_version = pa.__version__
/usr/local/lib/python3.9/dist-packages/pyarrow/__init__.py in <module>
63 _gc_enabled = _gc.isenabled()
64 _gc.disable()
---> 65 import pyarrow.lib as _lib
66 if _gc_enabled:
67 _gc.enable()
/usr/local/lib/python3.9/dist-packages/pyarrow/types.pxi in init pyarrow.lib()
/usr/local/lib/python3.9/dist-packages/pyarrow/types.pxi in pyarrow.lib._register_py_extension_type()
/usr/local/lib/python3.9/dist-packages/pyarrow/error.pxi in pyarrow.lib.check_status()
ArrowKeyError: A type extension with name arrow.py_extension_type already defined
关于如何解决这个问题,有什么建议吗?谢谢。