TypeError: can only concatenate list (not "str") to list
Opened this issue · 12 comments
shoghilin commented
When I try to run the sample code
from torchsummaryX import summary
import torch
from torch import nn
from torch.nn import functional as F

class Net(nn.Module):
    def __init__(self):
        super(Net, self).__init__()
        self.conv1 = nn.Conv2d(1, 10, kernel_size=5)
        self.conv2 = nn.Conv2d(10, 20, kernel_size=5)
        self.conv2_drop = nn.Dropout2d()
        self.fc1 = nn.Linear(320, 50)
        self.fc2 = nn.Linear(50, 10)

    def forward(self, x):
        x = F.relu(F.max_pool2d(self.conv1(x), 2))
        x = F.relu(F.max_pool2d(self.conv2_drop(self.conv2(x)), 2))
        x = x.view(-1, 320)
        x = F.relu(self.fc1(x))
        x = F.dropout(x, training=self.training)
        x = self.fc2(x)
        return F.log_softmax(x, dim=1)

summary(Net(), torch.zeros((1, 1, 28, 28)))
I get the following error:
Traceback (most recent call last):
File "C:\Users\shoghi\Desktop\liver_segmentation\LTS_code\temp.py", line 23, in <module>
summary(Net(), torch.zeros((1, 1, 28, 28)))
File "C:\Users\shoghi\anaconda3\envs\liver_segmentation\Lib\site-packages\torchsummaryX\torchsummaryX.py", line 101, in summary
df_sum = df.sum()
^^^^^^^^
File "C:\Users\shoghi\anaconda3\envs\liver_segmentation\Lib\site-packages\pandas\core\generic.py", line 11512, in sum
return NDFrame.sum(self, axis, skipna, numeric_only, min_count, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\shoghi\anaconda3\envs\liver_segmentation\Lib\site-packages\pandas\core\generic.py", line 11280, in sum
return self._min_count_stat_function(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\shoghi\anaconda3\envs\liver_segmentation\Lib\site-packages\pandas\core\generic.py", line 11263, in _min_count_stat_function
return self._reduce(
^^^^^^^^^^^^^
File "C:\Users\shoghi\anaconda3\envs\liver_segmentation\Lib\site-packages\pandas\core\frame.py", line 10524, in _reduce
res = df._mgr.reduce(blk_func)
^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\shoghi\anaconda3\envs\liver_segmentation\Lib\site-packages\pandas\core\internals\managers.py", line 1534, in reduce
nbs = blk.reduce(func)
^^^^^^^^^^^^^^^^
File "C:\Users\shoghi\anaconda3\envs\liver_segmentation\Lib\site-packages\pandas\core\internals\blocks.py", line 339, in reduce
result = func(self.values)
^^^^^^^^^^^^^^^^^
File "C:\Users\shoghi\anaconda3\envs\liver_segmentation\Lib\site-packages\pandas\core\frame.py", line 10487, in blk_func
return op(values, axis=axis, skipna=skipna, **kwds)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\shoghi\anaconda3\envs\liver_segmentation\Lib\site-packages\pandas\core\nanops.py", line 96, in _f
return f(*args, **kwargs)
^^^^^^^^^^^^^^^^^^
File "C:\Users\shoghi\anaconda3\envs\liver_segmentation\Lib\site-packages\pandas\core\nanops.py", line 421, in new_func
result = func(values, axis=axis, skipna=skipna, mask=mask, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\shoghi\anaconda3\envs\liver_segmentation\Lib\site-packages\pandas\core\nanops.py", line 494, in newfunc
return func(values, axis=axis, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\shoghi\anaconda3\envs\liver_segmentation\Lib\site-packages\pandas\core\nanops.py", line 652, in nansum
the_sum = values.sum(axis, dtype=dtype_sum)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\shoghi\anaconda3\envs\liver_segmentation\Lib\site-packages\numpy\core\_methods.py", line 49, in _sum
return umr_sum(a, axis, dtype, out, keepdims, initial, where)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
TypeError: can only concatenate list (not "str") to list
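For anyone hitting this: the traceback points into pandas, not into the model, and it looks like a pandas 2.x behavior change. torchsummaryX calls df.sum() on its summary DataFrame, whose object columns apparently mix list-valued shapes with placeholder strings; pandas < 2.0 silently skipped columns it could not reduce, while pandas >= 2.0 reduces every column by default and so raises. A minimal sketch of the suspected cause (the column names and values are illustrative, not copied from torchsummaryX):

import pandas as pd

# Illustrative table: a numeric column plus an object column that mixes
# list-valued shapes with a "-" placeholder, roughly like the summary
# table torchsummaryX builds before calling df.sum().
df = pd.DataFrame({
    "Params": [260.0, 0.0, 5020.0],
    "Kernel Shape": [[10, 1, 5, 5], "-", [50, 320]],
})

try:
    df.sum()  # pandas >= 2.0 raises: can only concatenate list (not "str") to list
except TypeError as e:
    print("reproduced:", e)

print(df.sum(numeric_only=True))  # reducing only the numeric columns works on any version

That would also explain why downgrading pandas, as suggested in the comments below, makes the error go away.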
kimwj94 commented
I got the same error.
nakasako commented
Same error for me.
nlune commented
Same error
AstrickHarren commented
same error for me
Topology2333 commented
Same error for me.
Topology2333 commented
Though it works fine on 1.1.0…
HamaJosh commented
Same error for me; I solved it by reinstalling version 1.1.0.
bigbigyellow commented
Lowering the pandas version solves the problem. I installed pandas==1.5.2 and the error went away.
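For anyone doing this from a script or notebook, a minimal sketch of the downgrade (it assumes pip manages pandas in the active environment; running pip install pandas==1.5.2 in a terminal is equivalent):

import subprocess
import sys

# Install the pre-2.0 pandas release reported above; any pandas < 2.0 should
# restore the old DataFrame.sum() behavior that torchsummaryX relies on.
subprocess.check_call([sys.executable, "-m", "pip", "install", "pandas==1.5.2"])

Restart the Python process afterwards so the downgraded pandas is actually imported.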
ankushbbbr commented
Lowering the pandas version solves the problem. I installed pandas==1.5.2 and the error went away.
Worked for me
greyXUO commented
Worked for me too
gigwegbe commented
@bigbigyellow Thanks, it worked.