Many calls to the console | Python print() output not appearing


Across many calls, my print statements are not showing up on the console.

The purpose of my program is to display the mathematical "workings" after a forward pass in a perceptron. The maths itself is not important here; let's loosely assume that any maths referenced is correct.

My problem occurs at # -- OUTPUT -- in Perceptron.py.

Please excuse the size of the program.

main.py

import os
os.system('clear')
import Perceptron
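Note that main.py produces output at all only because Python executes a module's statements on first import, so import Perceptron runs Perceptron.py top to bottom.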
Perceptron.py

import ActivationFunctions as af
import numpy as np
from math import e

X = [[0.5, 0.3], [-0.5, 0.9], [0, -0.1], [1, 0]]
target = 0.7
LR = 0.01
dp = 5

# ----- Forward Pass -----
print('Forward Pass')

# -- INPUT --
in_str = 'in = '
for input in X:
  substr = '('+str(input[0])+' x '+str(input[1])+') + '
  in_str += substr
in_str = in_str[:-3]
print(in_str)

calcs = [x * y for x, y in X]
in_str = '   = '
for c in calcs:
  substr = '('+str(c)+') + '
  in_str += substr
in_str = in_str[:-3]
print(in_str)

ans = round(sum([x * y for x, y in X]), dp)
print('   = ' + str(ans))
print()

# PROBLEM OCCURS HERE
# -- OUTPUT --
# SIGMOID
out = af.invoker('softmax', LR, ans, dp)
print()
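For reference, running main.py prints roughly the following and then goes quiet at the # -- OUTPUT -- step; nothing from softmax() ever appears (exact float formatting may vary):

Forward Pass
in = (0.5 x 0.3) + (-0.5 x 0.9) + (0 x -0.1) + (1 x 0)
   = (0.15) + (-0.45) + (-0.0) + (0)
   = -0.3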
ActivationFunctions.py

import numpy as np
from math import e

def binary_step(ans, dp):
  if ans >= 0: return 1
  else: return 0

def identity(ans, dp):
  return round(ans, dp)

def logistic(ans, dp):
  return round((1)/(1+(e**-ans)), dp)

def tanh(ans, dp):
  return round(((e**ans) - (e**-ans))/((e**ans) + (e**-ans)), dp)

def relu(ans, dp):
  if ans < 0: return 0
  else: return round(ans, dp)

def leaky_relu(LR, ans, dp):
  if ans < 0: return round(LR*ans, dp)
  else: return round(ans, dp)

def softmax(ans, dp):
  print('out = 1 / (1 + e^-'+str(+ans)+')')
  out = round(1 / (1 + e**-ans), dp)
  print('    = '+str(out))
  return out

def invoker(name, LR, ans, dp):
  name = name.lower()
  if 'binary' or 'step' in name: return binary_step(ans, dp)
  elif name == 'identity': return identity(ans, dp)
  elif name == 'logistic': return logistic(ans, dp)
  elif name == 'tanh': return tanh(ans, dp)
  elif name == 'relu': return relu(ans, dp)
  elif name == 'lrelu' or 'leaky' in name: return leaky_relu(LR, ans, dp)
  elif name == 'softmax': return softmax(ans, dp)
  else: print("ENTER VALID ACTIVATION FUNCTION")
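The symptom reproduces in isolation with just this module; a minimal sketch (the value -0.3 stands in for the ans computed in Perceptron.py):

import ActivationFunctions as af

out = af.invoker('softmax', 0.01, -0.3, 5)  # expected: softmax() prints its working
print(out)                                  # actually prints 0; softmax() never prints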

As kind commenters have pointed out, in ActivationFunctions.py:

The initial if statement in the invoker() method needs to be explicit in its condition.

if 'binary' or 'step' in name: return binary_step(ans, dp)

should be:

if 'binary' in name or 'step' in name: return binary_step(ans, dp)

where "in name" is stated twice. Otherwise we are merely mentioning a possible substring 'binary', but tested against what? Python parses the original condition as 'binary' or ('step' in name), and a non-empty string is always truthy, so the first branch always wins: invoker() unconditionally returns binary_step(ans, dp), never reaches softmax(), and so its print statements never run.
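A quick sketch makes the truthiness point concrete:

name = 'softmax'
print(bool('binary' or 'step' in name))    # True  -- the non-empty string 'binary' decides it
print('binary' in name or 'step' in name)  # False -- each substring test is now explicit

More generally, comparing name against exact strings (or dispatching through a dict of name-to-function mappings) avoids ambiguous substring tests like this altogether.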

Interesting, OK. Maybe it is my environment; I am using repl.it. If the issue gets resolved, I will report back with the reason it was not printing. Sod's law: I tried this minimal code solution and it works :/. I will attach the whole ActivationFunctions.py as well as main.py. @Reti43 could you please try again with the updated code in the question? Cheers. Off to the UK, back in 1 hour.