Python ODBC SQL Server Driver - Connection is busy with results for another hstmt (0) (SQLExecDirectW)


Error: pyodbc.Error: ('HY000', '[HY000] [Microsoft][ODBC SQL Server Driver]Connection is busy with results for another hstmt (0) (SQLExecDirectW)')

Scenario: I am trying to upload txt, csv and excel files from the front end (Angular). I am using Python with pyodbc as the back-end service to connect to MS SQL Server 2017 as the database.

Case 1: Everything works fine when I send a single file source.

Case 2: For multiple file uploads, I build a list of all the files in the front end and then pass each file to the service one by one, as a queue.
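
To make Case 2 concrete, this is roughly how the files reach the service, sketched here in Python instead of Angular. Everything in it is a placeholder (URL, file names, payload values); the payload fields simply mirror what the handler below reads from request.get_json().

import requests   # stand-in HTTP client; the real caller is the Angular front end

upload_queue = ['sales.csv', 'inventory.csv', 'users.txt']   # hypothetical list built in the UI

for name in upload_queue:
    payload = {
        'file_name': name, 'file_path': 'C:\\uploads\\', 'file_sep': ',',
        'file_quotechar': '', 'file_skiprows': '', 'file_header': '0',
        'file_encoding': 'utf-8', 'file_nrows': '', 'file_quoting': '',
        'created_by': 'user1',
    }
    # the next file is only sent after the previous call has returned
    resp = requests.post('http://localhost:5000/fileupload', json=payload)
    print(name, resp.status_code)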

Database connection string:

conn = pyodbc.connect('Driver={SQL Server};'
                      'Server=XXXXXXXXXXXXX;'
                      'Database=XXXXXXXXXXXXX;'
                      'UID=XXXXXXXXXXXXX;'
                      'PWD=XXXXXXXXXXXXX;'
                      'Trusted_Connection=True;'
                      'MARS_Connection=yes;')
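
One thing I am not sure about with this string: from what I have read, the legacy {SQL Server} driver (sqlsrv32) predates MARS, so the MARS_Connection=yes keyword is probably ignored, and Trusted_Connection=True together with UID/PWD may make the driver use Windows authentication anyway. For comparison, this is the connection I would sketch against a newer driver, assuming "ODBC Driver 17 for SQL Server" is installed (server and credentials are placeholders, as above):

import pyodbc

conn = pyodbc.connect('Driver={ODBC Driver 17 for SQL Server};'   # newer driver that does support MARS
                      'Server=XXXXXXXXXXXXX;'
                      'Database=XXXXXXXXXXXXX;'
                      'UID=XXXXXXXXXXXXX;'
                      'PWD=XXXXXXXXXXXXX;'
                      'MARS_Connection=yes;')                     # honoured here, unlike with the legacy driver
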
I have tried several configurations for opening and closing the database connection.

Code 1:

import csv
import json
import os
import re
import sys
from collections import OrderedDict
from datetime import datetime

import pandas as pd
import pyodbc
from flask import request, jsonify

# home_directory (the root folder for the generated .json files) is defined elsewhere in the application

conn = pyodbc.connect('Driver={SQL Server};'
                      'Server=XXXXXXXXXXXXX;'
                      'Database=XXXXXXXXXXXXX;'
                      'UID=XXXXXXXXXXXXX;'
                      'PWD=XXXXXXXXXXXXX;'
                      'Trusted_Connection=True;'
                      'MARS_Connection=yes;')
def fileupload():

    try:
        cur = conn.cursor()
        #pkey_sys_role = random.randrange(1, 999, 1)
        file_name = request.get_json()['file_name']
        file_path = request.get_json()['file_path']
        file_sep = request.get_json()['file_sep']
        if file_sep != '':
            file_sep = re.escape(file_sep)
        else:
            file_sep = ','

        file_quotechar = request.get_json()['file_quotechar']
#        file_quotechar = re.escape(file_quotechar)

        file_skiprows = request.get_json()['file_skiprows']
        if file_skiprows != '':
            file_skiprows = int(file_skiprows)
        else:
            file_skiprows = 0

        file_header = request.get_json()['file_header']
        if file_header != '':
            file_header = int(file_header)
            start_pos = file_header+1
        else:
            file_header = None
            start_pos = 0



        file_encoding = request.get_json()['file_encoding']
#        if file_encoding != '':
#            file_encoding = str(file_encoding)
#        else:
#            file_encoding = None

        file_nrows = request.get_json()['file_nrows']
        if file_nrows != '':
            file_nrows = int(file_nrows)
        else:
            file_nrows = None

        file_quoting = request.get_json()['file_quoting']

        if file_quoting == '1':
            file_quoting = csv.QUOTE_ALL
#        elif file_quoting == '2':
#            file_quoting = csv.QUOTE_NONNUMERIC
        elif file_quoting == '3':
            file_quoting = csv.QUOTE_NONE
        else:
            file_quoting = csv.QUOTE_MINIMAL

#        print(file_quoting)

        created_by = request.get_json()['created_by']
        create_date = datetime.utcnow()
        output_dump = ''

        ctl_key = created_by+re.sub('[^A-Za-z0-9]+', '', file_name)
#        print(ctl_key)


    #    get file name only

        file_name_only = os.path.splitext(file_name)[0]
        file_type = os.path.splitext(file_name)[1]
#        print(file_type)

        allowed_file_types = ('.csv', '.txt', '.xls', '.xlt', '.xlm', '.xlsx', '.xlsm', '.xltx', '.xltm', '.xlsb', '.xla', '.xlam', '.xll', '.xlw')

        csvlist = []

        if file_type in allowed_file_types:
            #json conversion  
            if file_quotechar != '':
                reader = pd.read_csv(
                    file_path + file_name,                     # full path to the uploaded file
                    sep=file_sep,                              # separator sent from the front end (default ',')
                    quotechar=file_quotechar,                  # quote character sent from the front end
                    skiprows=range(start_pos, file_skiprows),  # skip line numbers in [start_pos, file_skiprows)
                    header=file_header,                        # header row index, or None for no header
                    encoding=file_encoding,                    # e.g. 'utf-8', 'ascii'
                    nrows=file_nrows,                          # optional row limit, or None for all rows
                    engine='python'
                )
            else:
                reader = pd.read_csv(
                    file_path + file_name,                     # full path to the uploaded file
                    sep=file_sep,                              # separator sent from the front end (default ',')
                    skiprows=range(start_pos, file_skiprows),  # skip line numbers in [start_pos, file_skiprows)
                    header=file_header,                        # header row index, or None for no header
                    encoding=file_encoding,                    # e.g. 'utf-8', 'ascii'
                    nrows=file_nrows,                          # optional row limit, or None for all rows
                    engine='python'
                )

            headerlist = list(reader.columns)
#                print(headerlist)
            tuples = [tuple(x) for x in reader.values]
    #        print(tuples)

    #        with open(file_path+''+file_name,'r') as f:
    #            reader = csv.reader(f, delimiter=file_delimiter, quoting=csv.QUOTE_NONE)
    #            headerlist = next(reader)

            csvlist = []
            for row in tuples:
                d = OrderedDict()
                for i, x in enumerate(row):
                    d[headerlist[i]] = x
                csvlist.append(d)
    #        print(csvlist)

            output_dump = json.dumps(csvlist)
            output_res = json.loads(output_dump)

            cur.execute("SELECT * FROM C_SYS_UPLOADED_FILES where (FILE_NAME = '" + str(file_name) +"' and CREATED_BY= '" + str(created_by) +"') or CTL_KEY= '" + str(ctl_key) +"' ")
            rv = cur.fetchone()

            if rv is not None:

                target_directory = home_directory+created_by+'\\'
                if not os.path.exists(target_directory):
                    os.makedirs(target_directory)

                target_file_name = file_name_only+'.json'
                with open(target_directory+''+target_file_name,'w', encoding='utf-8') as f:
                    json.dump(csvlist,f,ensure_ascii=False)

                cur.execute("UPDATE C_SYS_UPLOADED_FILES SET LAST_UPDATE_DATE= '" + str(create_date) +"', UPDATED_BY= '" + str(created_by) +"', TARGET_FILE_NAME= '" + str(target_file_name) +"', TARGET_FILE_PATH= '" + str(target_directory) +"', FILE_SEP= '" + str(file_sep) +"', FILE_QUOTECHAR= '" + str(file_quotechar) +"', FILE_SKIPROWS= '" + str(file_skiprows) +"', FILE_HEADER= '" + str(file_header) +"', FILE_ENCODING= '" + str(file_encoding) +"', FILE_NROWS= '" + str(file_nrows) +"', FILE_QUOTING= '" + str(file_quoting) +"' WHERE CTL_KEY= '" + str(ctl_key) +"'")
                conn.commit()
                cur.close()
                result = {'file_name':file_name,
                  'file_path':file_path,
                  'file_type':file_type,
                  'created_on':create_date,
                  'created_by':created_by,
                  'target_file_name':target_file_name,
                  'target_file_path':target_directory,
                  'last_update_date':create_date,
                  'updated_by':created_by,
                  'file_content':output_res,
                  'ctl_key':ctl_key,
                  'file_sep':file_sep,
                  'file_quotechar':file_quotechar,
                  'file_skiprows':file_skiprows,
                  'file_header':file_header,
                  'file_encoding':file_encoding,
                  'file_nrows':file_nrows,
                  'file_quoting':file_quoting,
                  'message':'File has been updated successfully'
                }
            else:
                cur.execute("INSERT INTO C_SYS_UPLOADED_FILES (CREATE_DATE, CREATED_BY, FILE_NAME, FILE_EXT, FILE_PATH, CTL_KEY) VALUES('" + 
                        str(create_date) + "','" +
                        str(created_by) + "','" +
                        str(file_name) + "','" +
                        str(file_type) + "','" +
                        str(file_path) + "','" +
                        str(ctl_key) + "')")
                conn.commit()
                cur.close()

                result = {'file_name':file_name,
                  'file_path':file_path,
                  'file_type':file_type,
                  'created_on':create_date,
                  'created_by':created_by,
                  'message':'File has been uploaded successfully',
                  'file_content':output_res
                }
        else:
            result = "Invalid file format. Please try again with a valid file!"
    except IOError as e:
        result = "I/O error"+str(e)
    except ValueError as e:
        result = "ValueError error"+str(e)
    except:
        result = "Unexpected error:"+str(sys.exc_info()[0])
        raise
    return jsonify(result) 
cur.execute("SELECT * FROM C_SYS_UPLOADED_FILES where (FILE_NAME = '" + str(file_name) +"' and CREATED_BY= '" + str(created_by) +"') or CTL_KEY= '" + str(ctl_key) +"' ")
pyodbc.Error: ('HY000', '[HY000] [Microsoft][ODBC SQL Server Driver]Connection is busy with results for another hstmt (0) (SQLExecDirectW)')

Output:

cur.execute("SELECT * FROM C_SYS_UPLOADED_FILES where (FILE_NAME = '" + str(file_name) +"' and CREATED_BY= '" + str(created_by) +"') or CTL_KEY= '" + str(ctl_key) +"' ")
pyodbc.Error: ('HY000', '[HY000] [Microsoft][ODBC SQL Server Driver]Connection is busy with results for another hstmt (0) (SQLExecDirectW)')
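
As far as I understand, this message shows up when a second statement is executed while the shared connection still has unread results from an earlier one. A standalone sketch of that situation (table name and connection details are placeholders):

import pyodbc

conn = pyodbc.connect('Driver={SQL Server};Server=XXX;Database=XXX;UID=XXX;PWD=XXX;')

cur1 = conn.cursor()
cur1.execute("SELECT * FROM C_SYS_UPLOADED_FILES")   # opens a result set...
cur1.fetchone()                                      # ...but only one row is consumed

cur2 = conn.cursor()
# a second statement on the same non-MARS connection while cur1 still has pending rows:
# pyodbc.Error: ('HY000', '... Connection is busy with results for another hstmt ...')
cur2.execute("SELECT COUNT(*) FROM C_SYS_UPLOADED_FILES")
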
I tried different configurations in the code above. I added cur.close() and conn.close() after the MERGE statement, and after doing that I get the following error:

cur.execute("SELECT * FROM C_SYS_UPLOADED_FILES where (FILE_NAME = '" + str(file_name) +"' and CREATED_BY= '" + str(created_by) +"') or CTL_KEY= '" + str(ctl_key) +"' ")
pyodbc.ProgrammingError: ("The cursor's connection was closed.", 'HY000')
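
I think I at least understand this second error: conn is created once at module level, so as soon as one request calls conn.close(), every later request reuses a connection that is already closed. A stripped-down sketch of that shape (names and queries are placeholders):

import pyodbc

conn = pyodbc.connect('Driver={SQL Server};Server=XXX;Database=XXX;UID=XXX;PWD=XXX;')  # created once at import

def handle_request():
    cur = conn.cursor()       # every request shares the module-level connection
    cur.execute("SELECT 1")
    row = cur.fetchone()
    cur.close()
    conn.close()              # the first request closes the shared connection...
    return row

handle_request()
handle_request()              # ...so the second one raises a pyodbc.ProgrammingError about the closed connection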

PS: For now I am only uploading multiple csv files.

The problem is that I am not handling the database connection open and close mechanism properly. So it would be really helpful if someone could point out what I am doing wrong or what I should do, as I am still in the learning stage.
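
For reference, the kind of per-request open/close handling I am asking about would look roughly like the sketch below; it is not my current code, and the query and connection string are placeholders. Is this the right direction?

import pyodbc
from flask import jsonify

CONN_STR = ('Driver={SQL Server};Server=XXXXXXXXXXXXX;'
            'Database=XXXXXXXXXXXXX;UID=XXXXXXXXXXXXX;PWD=XXXXXXXXXXXXX;')

def fileupload():
    conn = pyodbc.connect(CONN_STR)   # fresh connection per request
    try:
        cur = conn.cursor()
        cur.execute("SELECT COUNT(*) FROM C_SYS_UPLOADED_FILES WHERE CREATED_BY = ?", ('user1',))
        count = cur.fetchone()[0]     # consume the result before running anything else
        cur.close()
        return jsonify({'count': count})
    finally:
        conn.close()                  # always released, even if an exception is raised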