从JIRA REST API中自动更新闪亮中的数据帧
我试图在一个 Shiny 应用程序中自动更新一个数据帧,该应用程序使用 JIRA 的 REST API 来填充可视化内容。我参考了能找到的资料,但对于“数据库 vs. 网页抓取”这两类场景似乎没有通用的解决方案。我尝试全局定义缺陷数据(并每 6 分钟全局更新一次),但当我尝试过滤这个全局数据帧时,得到错误:Error in eval: object 'defects' not found。下面是我用来收集更新的全局函数:
# Fetch every page of RXQE bug issues from the JIRA REST API.
#
# Follows JIRA's startAt pagination until all `total` records have been
# retrieved, collecting each page's issues and binding them into a single
# flattened data frame (via jsonlite::fromJSON / jsonlite::rbind_pages).
#
# Returns: one data frame containing all issue rows.
fetch_all_defects <- function() {
  baseURL <- "https://oneit.wba.com/jira/rest/api/2/search?fields=created,resolutiondate,customfield_13601,customfield_11014,creator,assignee,reporter,customfield_13400,customfield_11014,customfield_12501,key,status&jql=project%20in%20(RXQE)%20AND%20issuetype%20=%20Bug%20AND%20%22Business%20Priority%22%20!=%20EMPTY%20AND%20status%20!=%20Removed&maxResults=500"
  maxResults <- 500
  resultIndex <- -1   # sentinel so the first iteration always runs
  totalRecords <- 0
  pages <- list()
  i <- 1
  while (resultIndex < totalRecords) {
    url <- paste0(baseURL, "&startAt=", resultIndex, "&maxResults=", maxResults) # CaPs MaTtER!
    rawExport <- fromJSON(url, flatten = TRUE)
    # Advance past the rows just received; JIRA reports the true total,
    # which also terminates the loop once every page has been fetched.
    resultIndex <- rawExport$startAt + nrow(rawExport$issues)
    totalRecords <- rawExport$total
    pages[[i]] <- rawExport$issues
    i <- i + 1
  }
  rbind_pages(pages)
}

# Refresh the global `defects` data frame from JIRA at most once per
# `qfrequency` seconds.
#
# On the very first call (detected by the absence of the global `nextCall`)
# it performs the initial download; on later calls it re-downloads only
# when the scheduled `nextCall` time has passed, otherwise it is a no-op.
#
# Side effects (globals written, which the Shiny server reads):
#   defects  - data frame of all JIRA issues
#   nextCall - POSIXct time of the next permitted refresh
#   lastcall - POSIXct time of the most recent refresh
#
# qfrequency: minimum number of seconds between refreshes.
Getupdates <- function(qfrequency) {
  if (!exists("nextCall")) {
    lastcall <<- Sys.time()
    message("Initiating")
    # BUG FIX: the original used `defects <- ...` (function-local), so the
    # global `defects` was never created and the server's reactives failed
    # with "object 'defects' not found". `<<-` assigns it globally.
    defects <<- fetch_all_defects()
    nextCall <<- Sys.time() + qfrequency
    message("Got Initial Data")
  } else if (Sys.time() >= nextCall) {
    lastcall <<- Sys.time()
    message(paste0(Sys.time(), " Querying Periodically"))
    defects <<- fetch_all_defects()
    nextCall <<- Sys.time() + qfrequency
    message("Got Updated Data")
  }
  # Not yet due for a refresh: do nothing (returns NULL invisibly).
  invisible(NULL)
}
(评论)你有没有试过 `defects <<- rbind_pages(pages)`?—— 你真是个英雄,这样就顺利解决了,非常感谢,帮我省去了不少麻烦!
server <- function(input, output, session) {
observe({
autoInvalidate()
# 300 seconds is 5 minute updates
Getupdates(UpdateR8)
})
Quality <- reactive({
rawNames <- c("fields.created", "fields.resolutiondate", "fields.customfield_13601.value",
"fields.creator.displayName", "fields.assignee.displayName",
"fields.customfield_13400.value","fields.customfield_11014.value",
"fields.customfield_12501.value",
"key", "fields.status.name", "fields.reporter.displayName")
myBugs <- defects %>% dplyr::select(rawNames) })