com.fasterxml.jackson.core.JsonParseException: Unrecognized token 'payload': was expecting ('true', 'false' or 'null')
2017-01-05T15:17:11.975129+00:00 app[web.1]: at [Source: payload=%7B%22zen%22%3A%22Non-blocking+is+better+than+blocking.%22%2C%22hook_id%22%3A11405663%2C%22hook%22%3A%7B%22type%22%3A%22Repository%22%2C%22id%22%3A11405663%2C%22name%22%3A%22web%22%2C%22active%22%3Atrue%2C%22events%22%3A%5B%22push%22%5D%2C%22config%22%3A%7B%22url%22%3A%22https%3A%2F%2Fmorning-crag-16002.herokuapp.com%2Fapplication%2Fgithub%2Fwebhooks%22%7D%2C%22updated_at%22%3A%222017-01-05T15%3A17%3A11Z%22%2C%22created_at%22%3A%222017-01-05T15%3A17%3A11Z%22%2C%22url%22%3A%22https%3A%2F%2Fapi.github.com%2Frepos%2Fdieffrei%2Ffflib-apex-common%2Fhooks%2F11405663%22%2C%22test_url%22%3A%22https%3A%2F%2Fapi.github.com%2Frepos%2Fdieffrei%2Ffflib-apex-common%2Fhooks%2F11405663%2Ftest%22%2C%22ping_url%22%3A%22https%3A%2F%2Fapi.github.com%2Frepos%2Fdieffrei%2Ffflib-apex-common%2Fhooks%2F11405663%2Fpings%22%2C%22last_response%22%3A%7B%22code%22%3Anull%2C%22status%22%3A%22unused%22%2C%22message%22%3Anull%7D%7D%2C%22repository%22%3A%7B%22id%22%3A77337343%2C%22name%22%3A%22fflib-apex-common%22%2C%22full_name%22%3A%22dieffrei%2Ffflib-apex-common%22%2C%22owner%22%3A%7B%22login%22%3A%22dieffrei%22%2C%22id%22%3A3090010%2C%22avatar_url%22%3A%22https%3A%2F%2Favatars.githubusercontent.com%2Fu%2F3090010%3Fv%3D3%22%2C%22gravatar_id%22%3A%22%22%2C%22url%22%3A%22https%3A%2F%2Fapi.github.com%2Fusers%2Fdieffrei%22%2C%22html_url%22%3A%22https%3A%2F%2Fgithub.com%2Fdieffrei%22%2C%22followers_url%22%3A%22https%3A%2F%2Fapi.github.com%2Fusers%2Fdieffrei%2Ffollowers%22%2C%22following_url%22%3A%22https%3A%2F%2Fapi.github.com%2Fusers%2Fdieffrei%2Ffollowing%7B%2Fother_user%7D%22%2C%22gists_url%22%3A%22https%3A%2F%2Fapi.github.com%2Fusers%2Fdieffrei%2Fgists%7B%2Fgist_id%7D%22%2C%22starred_url%22%3A%22https%3A%2F%2Fapi.github.com%2Fusers%2Fdieffrei%2Fstarred%7B%2Fowner%7D%7B%2Frepo%7D%22%2C%22subscriptions_url%22%3A%22https%3A%2F%2Fapi.github.com%2Fusers%2Fdieffrei%2Fsubscriptions%22%2C%22organizations_url%22%3A%22https%3A%2F%2Fapi.github.com%2Fusers%2Fdieffrei%2Forgs%22%2C%22repos_url%22%3A%22https%3A%2F%2Fapi.github.com%2Fusers%2Fdieffrei%2Frepos%22%2C%22events_url%22%3A%22https%3A%2F%2Fapi.github.com%2Fusers%2Fdieffrei%2Fevents%7B%2Fprivacy%7D%22%2C%22received_events_url%22%3A%22https%3A%2F%2Fapi.github.com%2Fusers%2Fdieffrei%2Freceived_events%22%2C%22type%22%3A%22User%22%2C%22site_admin%22%3Afalse%7D%2C%22private%22%3Afalse%2C%22html_url%22%3A%22https%3A%2F%2Fgithub.com%2Fdieffrei%2Ffflib-apex-common%22%2C%22description%22%3A%22Common+Apex+Library+supporting+Apex+Enterprise+Patterns+and+much+more%21%22%2C%22fork%22%3Atrue%2C%22url%22%3A%22https%3A%2F%2Fapi.github.com%2Frepos%2Fdieffrei%2Ffflib-apex-common%22%2C%22forks_url%22%3A%22https%3A%2F%2Fapi.github.com%2Frepos%2Fdieffrei%2Ffflib-apex-common%2Fforks%22%2C%22keys_url%22%3A%22https%3A%2F%2Fapi.github.com%2Frepos%2Fdieffrei%2Ffflib-apex-common%2Fkeys%7B%2Fkey_id%7D%22%2C%22collaborators_url%22%3A%22https%3A%2F%2Fapi.github.com%2Frepos%2Fdieffrei%2Ffflib-apex-common%2Fcollaborators%7B%2Fcollaborator%7D%22%2C%22teams_url%22%3A%22https%3A%2F%2Fapi.github.com%2Frepos%2Fdieffrei%2Ffflib-apex-common%2Fteams%22%2C%22hooks_url%22%3A%22https%3A%2F%2Fapi.github.com%2Frepos%2Fdieffrei%2Ffflib-apex-common%2Fhooks%22%2C%22issue_events_url%22%3A%22https%3A%2F%2Fapi.github.com%2Frepos%2Fdieffrei%2Ffflib-apex-common%2Fissues%2Fevents%7B%2Fnumber%7D%22%2C%22events_url%22%3A%22https%3A%2F%2Fapi.github.com%2Frepos%2Fdieffrei%2Ffflib-apex-common%2Fevents%22%2C%22assignees_url%22%3A%22https%3A%2F%2Fapi.github.com%2Frepos%2Fdieffrei%2Ffflib-apex-common%2Fassignees%7B%2Fuser%7D%22%2C%22branches_url%22%3A%22https%3A%2F%2Fapi.github.com%2Frepos%2Fdieffrei%2Ffflib-apex-common%2Fbranches%7B%2Fbranch%7D%22%2C%22tags_url%22%3A%22https%3A%2F%2Fapi.github.com%2Frepos%2Fdieffrei%2Ffflib-apex-common%2Ftags%22%2C%22blobs_url%22%3A%22https%3A%2F%2Fapi.github.com%2Frepos%2Fdieffrei%2Ffflib-apex-common%2Fgit%2Fblobs%7B%2Fsha%7D%22%2C%22git_tags_url%22%3A%22https%3A%2F%2Fapi.github.com%2Frepos%2Fdieffrei%2Ffflib-apex-common%2Fgit%2Ftags%7B%2Fsha%7D%22%2C%22git_refs_url%22%3A%22https%3A%2F%2Fapi.github.com%2Frepos%2Fdieffrei%2Ffflib-apex-common%2Fgit%2Frefs%7B%2Fsha%7D%22%2C%22trees_url%22%3A%22https%3A%2F%2Fapi.github.com%2Frepos%2Fdieffrei%2Ffflib-apex-common%2Fgit%2Ftrees%7B%2Fsha%7D%22%2C%22statuses_url%22%3A%22https%3A%2F%2Fapi.github.com%2Frepos%2Fdieffrei%2Ffflib-apex-common%2Fstatuses%2F%7Bsha%7D%22%2C%22languages_url%22%3A%22https%3A%2F%2Fapi.github.com%2Frepos%2Fdieffrei%2Ffflib-apex-common%2Flanguages%22%2C%22stargazers_url%22%3A%22https%3A%2F%2Fapi.github.com%2Frepos%2Fdieffrei%2Ffflib-apex-common%2Fstargazers%22%2C%22contributors_url%22%3A%22https%3A%2F%2Fapi.github.com%2Frepos%2Fdieffrei%2Ffflib-apex-common%2Fcontributors%22%2C%22subscribers_url%22%3A%22https%3A%2F%2Fapi.github.com%2Frepos%2Fdieffrei%2Ffflib-apex-common%2Fsubscribers%22%2C%22subscription_url%22%3A%22https%3A%2F%2Fapi.github.com%2Frepos%2Fdieffrei%2Ffflib-apex-common%2Fsubscription%22%2C%22commits_url%22%3A%22https%3A%2F%2Fapi.github.com%2Frepos%2Fdieffrei%2Ffflib-apex-common%2Fcommits%7B%2Fsha%7D%22%2C%22git_commits_url%22%3A%22https%3A%2F%2Fapi.github.com%2Frepos%2Fdieffrei%2Ffflib-apex-common%2Fgit%2Fcommits%7B%2Fsha%7D%22%2C%22comments_url%22%3A%22https%3A%2F%2Fapi.github.com%2Frepos%2Fdieffrei%2Ffflib-apex-common%2Fcomments%7B%2Fnumber%7D%22%2C%22issue_comment_url%22%3A%22https%3A%2F%2Fapi.github.com%2Frepos%2Fdieffrei%2Ffflib-apex-common%2Fissues%2Fcomments%7B%2Fnumber%7D%22%2C%22contents_url%22%3A%22https%3A%2F%2Fapi.github.com%2Frepos%2Fdieffrei%2Ffflib-apex-common%2Fcontents%2F%7B%2Bpath%7D%22%2C%22compare_url%22%3A%22https%3A%2F%2Fapi.github.com%2Frepos%2Fdieffrei%2Ffflib-apex-common%2Fcompare%2F%7Bbase%7D...%7Bhead%7D%22%2C%22merges_url%22%3A%22https%3A%2F%2Fapi.github.com%2Frepos%2Fdieffrei%2Ffflib-apex-common%2Fmerges%22%2C%22archive_url%22%3A%22https%3A%2F%2Fapi.github.com%2Frepos%2Fdieffrei%2Ffflib-apex-common%2F%7Barchive_format%7D%7B%2Fref%7D%22%2C%22downloads_url%22%3A%22https%3A%2F%2Fapi.github.com%2Frepos%2Fdieffrei%2Ffflib-apex-common%2Fdownloads%22%2C%22issues_url%22%3A%22https%3A%2F%2Fapi.github.com%2Frepos%2Fdieffrei%2Ffflib-apex-common%2Fissues%7B%2Fnumber%7D%22%2C%22pulls_url%22%3A%22https%3A%2F%2Fapi.github.com%2Frepos%2Fdieffrei%2Ffflib-apex-common%2Fpulls%7B%2Fnumber%7D%22%2C%22milestones_url%22%3A%22https%3A%2F%2Fapi.github.com%2Frepos%2Fdieffrei%2Ffflib-apex-common%2Fmilestones%7B%2Fnumber%7D%22%2C%22notifications_url%22%3A%22https%3A%2F%2Fapi.github.com%2Frepos%2Fdieffrei%2Ffflib-apex-common%2Fnotifications%7B%3Fsince%2Call%2Cparticipating%7D%22%2C%22labels_url%22%3A%22https%3A%2F%2Fapi.github.com%2Frepos%2Fdieffrei%2Ffflib-apex-common%2Flabels%7B%2Fname%7D%22%2C%22releases_url%22%3A%22https%3A%2F%2Fapi.github.com%2Frepos%2Fdieffrei%2Ffflib-apex-common%2Freleases%7B%2Fid%7D%22%2C%22deployments_url%22%3A%22https%3A%2F%2Fapi.github.com%2Frepos%2Fdieffrei%2Ffflib-apex-common%2Fdeployments%22%2C%22created_at%22%3A%222016-12-25T18%3A07%3A36Z%22%2C%22updated_at%22%3A%222016-12-25T18%3A07%3A39Z%22%2C%22pushed_at%22%3A%222016-12-26T12%3A28%3A45Z%22%2C%22git_url%22%3A%22git%3A%2F%2Fgithub.com%2Fdieffrei%2Ffflib-apex-common.git%22%2C%22ssh_url%22%3A%22git%40github.com%3Adieffrei%2Ffflib-apex-common.git%22%2C%22clone_url%22%3A%22https%3A%2F%2Fgithub.com%2Fdieffrei%2Ffflib-apex-common.git%22%2C%22svn_url%22%3A%22https%3A%2F%2Fgithub.com%2Fdieffrei%2Ffflib-apex-common%22%2C%22homepage%22%3A%22%22%2C%22size%22%3A19892%2C%22stargazers_count%22%3A0%2C%22watchers_count%22%3A0%2C%22language%22%3A%22Apex%22%2C%22has_issues%22%3Afalse%2C%22has_downloads%22%3Atrue%2C%22has_wiki%22%3Atrue%2C%22has_pages%22%3Afalse%2C%22forks_count%22%3A0%2C%22mirror_url%22%3Anull%2C%22open_issues_count%22%3A1%2C%22forks%22%3A0%2C%22open_issues%22%3A1%2C%22watchers%22%3A0%2C%22default_branch%22%3A%22master%22%7D%2C%22sender%22%3A%7B%22login%22%3A%22dieffrei%22%2C%22id%22%3A3090010%2C%22avatar_url%22%3A%22https%3A%2F%2Favatars.githubusercontent.com%2Fu%2F3090010%3Fv%3D3%22%2C%22gravatar_id%22%3A%22%22%2C%22url%22%3A%22https%3A%2F%2Fapi.github.com%2Fusers%2Fdieffrei%22%2C%22html_url%22%3A%22https%3A%2F%2Fgithub.com%2Fdieffrei%22%2C%22followers_url%22%3A%22https%3A%2F%2Fapi.github.com%2Fusers%2Fdieffrei%2Ffollowers%22%2C%22following_url%22%3A%22https%3A%2F%2Fapi.github.com%2Fusers%2Fdieffrei%2Ffollowing%7B%2Fother_user%7D%22%2C%22gists_url%22%3A%22https%3A%2F%2Fapi.github.com%2Fusers%2Fdieffrei%2Fgists%7B%2Fgist_id%7D%22%2C%22starred_url%22%3A%22https%3A%2F%2Fapi.github.com%2Fusers%2Fdieffrei%2Fstarred%7B%2Fowner%7D%7B%2Frepo%7D%22%2C%22subscriptions_url%22%3A%22https%3A%2F%2Fapi.github.com%2Fusers%2Fdieffrei%2Fsubscriptions%22%2C%22organizations_url%22%3A%22https%3A%2F%2Fapi.github.com%2Fusers%2Fdieffrei%2Forgs%22%2C%22repos_url%22%3A%22https%3A%2F%2Fapi.github.com%2Fusers%2Fdieffrei%2Frepos%22%2C%22events_url%22%3A%22https%3A%2F%2Fapi.github.com%2Fusers%2Fdieffrei%2Fevents%7B%2Fprivacy%7D%22%2C%22received_events_url%22%3A%22https%3A%2F%2Fapi.github.com%2Fusers%2Fdieffrei%2Freceived_events%22%2C%22type%22%3A%22User%22%2C%22site_admin%22%3Afalse%7D%7D; line: 1, column: 8]
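
The Source line shows why Jackson gives up at line 1, column 8: the hook was created with GitHub's default "form" content type, so the event arrives as an application/x-www-form-urlencoded body with the JSON URL-encoded inside a payload= parameter, and the app handed that raw body straight to the JSON parser. Two fixes: create the hook with "content_type": "json" in its config, or unwrap and decode the payload field before parsing. Below is a minimal servlet-style sketch of the second option; the class name and response handling are hypothetical, not the app's actual code.

    import java.io.IOException;
    import javax.servlet.http.HttpServlet;
    import javax.servlet.http.HttpServletRequest;
    import javax.servlet.http.HttpServletResponse;
    import com.fasterxml.jackson.databind.JsonNode;
    import com.fasterxml.jackson.databind.ObjectMapper;

    // Hypothetical endpoint for the hook URL above. With GitHub's default
    // "form" content type, the event JSON sits inside a payload= form field,
    // so it must be pulled out of the form body before Jackson sees it.
    public class GithubWebhookServlet extends HttpServlet {
        private static final ObjectMapper MAPPER = new ObjectMapper();

        @Override
        protected void doPost(HttpServletRequest req, HttpServletResponse resp) throws IOException {
            // getParameter() reads the form-encoded body and URL-decodes the value.
            String payload = req.getParameter("payload");
            if (payload == null) {
                resp.sendError(HttpServletResponse.SC_BAD_REQUEST, "missing payload parameter");
                return;
            }
            JsonNode event = MAPPER.readTree(payload); // parses cleanly: no leading "payload=" token
            resp.setStatus(HttpServletResponse.SC_OK);
        }
    }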

GitHub | dieffrei | 2 months ago
  1.

    Webhook creation response error

    GitHub | 2 months ago | dieffrei
    com.fasterxml.jackson.core.JsonParseException: Unrecognized token 'payload': was expecting ('true', 'false' or 'null')
  2.

    'pull_request' is absent in JSON

    GitHub | 3 years ago | yegor256
    java.lang.IllegalStateException: 'pull_request' is absent in JSON: {"url":"https://api.github.com/repos/jcabi/jcabi-immutable/issues/1","labels_url":"https://api.github.com/repos/jcabi/jcabi-immutable/issues/1/labels{/name}","comments_url":"https://api.github.com/repos/jcabi/jcabi-immutable/issues/1/comments","events_url":"https://api.github.com/repos/jcabi/jcabi-immutable/issues/1/events","html_url":"https://github.com/jcabi/jcabi-immutable/issues/1","id":29050296,"number":1,"title":"use com.jcabi:jcabi 1.4","user":{"login":"yegor256","id":526301,"avatar_url":"https://gravatar.com/avatar/70942ffdd8084b5a51e17e3c0996d53c?d=https%3A%2F%2Fidenticons.github.com%2F6bf5588e99f203473873966318476660.png&r=x","gravatar_id":"70942ffdd8084b5a51e17e3c0996d53c","url":"https://api.github.com/users/yegor256","html_url":"https://github.com/yegor256","followers_url":"https://api.github.com/users/yegor256/followers","following_url":"https://api.github.com/users/yegor256/following{/other_user}","gists_url":"https://api.github.com/users/yegor256/gists{/gist_id}","starred_url":"https://api.github.com/users/yegor256/starred{/owner}{/repo}","subscriptions_url":"https://api.github.com/users/yegor256/subscriptions","organizations_url":"https://api.github.com/users/yegor256/orgs","repos_url":"https://api.github.com/users/yegor256/repos","events_url":"https://api.github.com/users/yegor256/events{/privacy}","received_events_url":"https://api.github.com/users/yegor256/received_events","type":"User","site_admin":false},"labels":[{"url":"https://api.github.com/repos/jcabi/jcabi-immutable/labels/enhancement","name":"enhancement","color":"84b6eb"},{"url":"https://api.github.com/repos/jcabi/jcabi-immutable/labels/DEV","name":"DEV","color":"bfd4f3"},{"url":"https://api.github.com/repos/jcabi/jcabi-immutable/labels/%40carlosmiranda","name":"@carlosmiranda","color":"009801"},{"url":"https://api.github.com/repos/jcabi/jcabi-immutable/labels/30+mins","name":"30 mins","color":"eb6421"}],"state":"closed","assignee":null,"milestone":null,"comments":5,"created_at":"2014-03-09T15:21:23Z","updated_at":"2014-03-11T11:06:36Z","closed_at":"2014-03-10T12:59:17Z","body":"let's use `com.jcabi:jcabi:1.4` as parent `pom`"}
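
    Item 2 is the inverse problem: the body parses fine as JSON, but the expected 'pull_request' key is genuinely absent, because GitHub only includes that object on issues that are pull requests. The reader-side fix is a presence check before access. A minimal sketch follows, assuming Jackson rather than the javax.json API the jcabi client actually uses; the helper class is hypothetical.

        import java.io.IOException;
        import com.fasterxml.jackson.databind.JsonNode;
        import com.fasterxml.jackson.databind.ObjectMapper;

        // Hypothetical guard: only treat an issue as a pull request when the
        // "pull_request" object is actually present in its JSON.
        public final class PullRequestGuard {
            private static final ObjectMapper MAPPER = new ObjectMapper();

            public static JsonNode pullRequestOrNull(String issueJson) throws IOException {
                JsonNode root = MAPPER.readTree(issueJson);
                // Plain issues, like the closed issue in this trace, omit the key entirely.
                return root.hasNonNull("pull_request") ? root.get("pull_request") : null;
            }
        }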
  3.

    Application crash when receiving camera events

    GitHub | 3 months ago | congngale
    com.firebase.client.FirebaseException: Failed to bounce to type
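
    "Failed to bounce to type" is the legacy Firebase Android client's error when DataSnapshot.getValue(SomeClass.class) cannot map a snapshot onto the target class, most often because the class lacks a public no-argument constructor or bean-style accessors matching the JSON keys. A sketch of a model shaped to deserialize cleanly; the CameraEvent class and its fields are assumptions, not the reporter's actual model.

        // Hypothetical model for the camera events in item 3. The legacy
        // com.firebase.client mapper needs a public no-arg constructor and
        // getters/setters whose names match the snapshot's keys.
        public class CameraEvent {
            private String type;
            private long timestamp;

            public CameraEvent() { }  // required by DataSnapshot.getValue(CameraEvent.class)

            public String getType() { return type; }
            public void setType(String type) { this.type = type; }
            public long getTimestamp() { return timestamp; }
            public void setTimestamp(long timestamp) { this.timestamp = timestamp; }
        }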
  4.

    general - Bug#805813: llvm-toolchain-3.4 ftbfs with ISL 0.15 - msg#01908 - Recent Discussion OSDir.com

    osdir.com | 11 months ago
    hive_on_spark.hql
    SLF4J: Class path contains multiple SLF4J bindings.
    SLF4J: Found binding in [jar:file:/usr/lib/spark/lib/spark-assembly-1.5.2-hadoop2.6.0.jar!/org/slf4j/impl/StaticLoggerBinder.class]
    SLF4J: Found binding in [jar:file:/home/hduser/hadoop-2.6.0/share/hadoop/common/lib/slf4j-log4j12-1.7.5.jar!/org/slf4j/impl/StaticLoggerBinder.class]
    SLF4J: See http://www.slf4j.org/codes.html#multiple_bindings for an explanation.
    SLF4J: Actual binding is of type [org.slf4j.impl.Log4jLoggerFactory]
    15/11/30 16:57:57 [main]: DEBUG util.NativeCodeLoader: Failed to load native-hadoop with error: java.lang.UnsatisfiedLinkError: /home/hduser/hadoop-2.6.0/lib/native/libhadoop.so.1.0.0: /lib64/libc.so.6: version `GLIBC_2.12' not found (required by /home/hduser/hadoop-2.6.0/lib/native/libhadoop.so.1.0.0)
    [remainder of the Hive-on-Spark session debug log truncated]
ipc.Client: IPC Client (1997160570) connection to rhes564/50.140.197.217:9000 from hduser sending #1415/11/30 16:57:59 [IPC Client (1997160570) connection to rhes564/50.140.197.217:9000 from hduser]: DEBUG ipc.Client: IPC Client (1997160570) connection to rhes564/50.140.197.217:9000 from hduser got value #1415/11/30 16:57:59 [main]: DEBUG ipc.ProtobufRpcEngine: Call: getFileInfo took 1ms15/11/30 16:57:59 [IPC Parameter Sending Thread #0]: DEBUG ipc.Client: IPC Client (1997160570) connection to rhes564/50.140.197.217:9000 from hduser sending #1515/11/30 16:57:59 [IPC Client (1997160570) connection to rhes564/50.140.197.217:9000 from hduser]: DEBUG ipc.Client: IPC Client (1997160570) connection to rhes564/50.140.197.217:9000 from hduser got value #1515/11/30 16:57:59 [main]: DEBUG ipc.ProtobufRpcEngine: Call: getFileInfo took 1ms15/11/30 16:57:59 [main]: DEBUG hdfs.DFSClient: /tmp/hive/hduser/dec45671-0b5f-4496-8b3a-66f097d0c6f0/hive_2015-11-30_16-57-58_805_2366423616530225987-1/-mr-10000/.hive-staging_hive_2015-11-30_16-57-58_805_2366423616530225987-1: masked=rwxr-xr-x15/11/30 16:57:59 [IPC Parameter Sending Thread #0]: DEBUG ipc.Client: IPC Client (1997160570) connection to rhes564/50.140.197.217:9000 from hduser sending #1615/11/30 16:57:59 [IPC Client (1997160570) connection to rhes564/50.140.197.217:9000 from hduser]: DEBUG ipc.Client: IPC Client (1997160570) connection to rhes564/50.140.197.217:9000 from hduser got value #1615/11/30 16:57:59 [main]: DEBUG ipc.ProtobufRpcEngine: Call: mkdirs took 1ms15/11/30 16:57:59 [IPC Parameter Sending Thread #0]: DEBUG ipc.Client: IPC Client (1997160570) connection to rhes564/50.140.197.217:9000 from hduser sending #1715/11/30 16:57:59 [IPC Client (1997160570) connection to rhes564/50.140.197.217:9000 from hduser]: DEBUG ipc.Client: IPC Client (1997160570) connection to rhes564/50.140.197.217:9000 from hduser got value #1715/11/30 16:57:59 [main]: DEBUG ipc.ProtobufRpcEngine: Call: getFileInfo took 0ms15/11/30 16:57:59 [main]: DEBUG shims.HadoopShimsSecure: {-chgrp,-R,supergroup,hdfs://rhes564:9000/tmp/hive/hduser/dec45671-0b5f-4496-8b3a-66f097d0c6f0/hive_2015-11-30_16-57-58_805_2366423616530225987-1/-mr-10000}15/11/30 16:57:59 [IPC Parameter Sending Thread #0]: DEBUG ipc.Client: IPC Client (1997160570) connection to rhes564/50.140.197.217:9000 from hduser sending #1815/11/30 16:57:59 [IPC Client (1997160570) connection to rhes564/50.140.197.217:9000 from hduser]: DEBUG ipc.Client: IPC Client (1997160570) connection to rhes564/50.140.197.217:9000 from hduser got value #1815/11/30 16:57:59 [main]: DEBUG ipc.ProtobufRpcEngine: Call: getFileInfo took 1ms15/11/30 16:57:59 [IPC Parameter Sending Thread #0]: DEBUG ipc.Client: IPC Client (1997160570) connection to rhes564/50.140.197.217:9000 from hduser sending #1915/11/30 16:57:59 [IPC Client (1997160570) connection to rhes564/50.140.197.217:9000 from hduser]: DEBUG ipc.Client: IPC Client (1997160570) connection to rhes564/50.140.197.217:9000 from hduser got value #1915/11/30 16:57:59 [main]: DEBUG ipc.ProtobufRpcEngine: Call: getListing took 0ms15/11/30 16:57:59 [IPC Parameter Sending Thread #0]: DEBUG ipc.Client: IPC Client (1997160570) connection to rhes564/50.140.197.217:9000 from hduser sending #2015/11/30 16:57:59 [IPC Client (1997160570) connection to rhes564/50.140.197.217:9000 from hduser]: DEBUG ipc.Client: IPC Client (1997160570) connection to rhes564/50.140.197.217:9000 from hduser got value #2015/11/30 16:57:59 [main]: DEBUG ipc.ProtobufRpcEngine: Call: getListing took 1ms15/11/30 16:57:59 
[main]: DEBUG shims.HadoopShimsSecure: Return value is :015/11/30 16:57:59 [main]: DEBUG shims.HadoopShimsSecure: {-chmod,-R,700,hdfs://rhes564:9000/tmp/hive/hduser/dec45671-0b5f-4496-8b3a-66f097d0c6f0/hive_2015-11-30_16-57-58_805_2366423616530225987-1/-mr-10000}15/11/30 16:57:59 [IPC Parameter Sending Thread #0]: DEBUG ipc.Client: IPC Client (1997160570) connection to rhes564/50.140.197.217:9000 from hduser sending #2115/11/30 16:57:59 [IPC Client (1997160570) connection to rhes564/50.140.197.217:9000 from hduser]: DEBUG ipc.Client: IPC Client (1997160570) connection to rhes564/50.140.197.217:9000 from hduser got value #2115/11/30 16:57:59 [main]: DEBUG ipc.ProtobufRpcEngine: Call: getFileInfo took 1ms15/11/30 16:57:59 [IPC Parameter Sending Thread #0]: DEBUG ipc.Client: IPC Client (1997160570) connection to rhes564/50.140.197.217:9000 from hduser sending #2215/11/30 16:57:59 [IPC Client (1997160570) connection to rhes564/50.140.197.217:9000 from hduser]: DEBUG ipc.Client: IPC Client (1997160570) connection to rhes564/50.140.197.217:9000 from hduser got value #2215/11/30 16:57:59 [main]: DEBUG ipc.ProtobufRpcEngine: Call: setPermission took 1ms15/11/30 16:57:59 [IPC Parameter Sending Thread #0]: DEBUG ipc.Client: IPC Client (1997160570) connection to rhes564/50.140.197.217:9000 from hduser sending #2315/11/30 16:57:59 [IPC Client (1997160570) connection to rhes564/50.140.197.217:9000 from hduser]: DEBUG ipc.Client: IPC Client (1997160570) connection to rhes564/50.140.197.217:9000 from hduser got value #2315/11/30 16:57:59 [main]: DEBUG ipc.ProtobufRpcEngine: Call: getListing took 1ms15/11/30 16:57:59 [IPC Parameter Sending Thread #0]: DEBUG ipc.Client: IPC Client (1997160570) connection to rhes564/50.140.197.217:9000 from hduser sending #2415/11/30 16:57:59 [IPC Client (1997160570) connection to rhes564/50.140.197.217:9000 from hduser]: DEBUG ipc.Client: IPC Client (1997160570) connection to rhes564/50.140.197.217:9000 from hduser got value #2415/11/30 16:57:59 [main]: DEBUG ipc.ProtobufRpcEngine: Call: setPermission took 3ms15/11/30 16:57:59 [IPC Parameter Sending Thread #0]: DEBUG ipc.Client: IPC Client (1997160570) connection to rhes564/50.140.197.217:9000 from hduser sending #2515/11/30 16:57:59 [IPC Client (1997160570) connection to rhes564/50.140.197.217:9000 from hduser]: DEBUG ipc.Client: IPC Client (1997160570) connection to rhes564/50.140.197.217:9000 from hduser got value #2515/11/30 16:57:59 [main]: DEBUG ipc.ProtobufRpcEngine: Call: getListing took 1ms15/11/30 16:57:59 [main]: DEBUG shims.HadoopShimsSecure: Return value is :015/11/30 16:57:59 [IPC Parameter Sending Thread #0]: DEBUG ipc.Client: IPC Client (1997160570) connection to rhes564/50.140.197.217:9000 from hduser sending #2615/11/30 16:57:59 [IPC Client (1997160570) connection to rhes564/50.140.197.217:9000 from hduser]: DEBUG ipc.Client: IPC Client (1997160570) connection to rhes564/50.140.197.217:9000 from hduser got value #2615/11/30 16:57:59 [main]: DEBUG ipc.ProtobufRpcEngine: Call: getFileInfo took 0ms15/11/30 16:57:59 [main]: DEBUG shims.HadoopShimsSecure: FileStatus{path=hdfs://rhes564:9000/tmp/hive/hduser/dec45671-0b5f-4496-8b3a-66f097d0c6f0/hive_2015-11-30_16-57-58_805_2366423616530225987-1/-mr-10000; isDirectory=true; modification_time=1448902679395; access_time=0; owner=hduser; group=supergroup; permission=rwx------; isSymlink=false}15/11/30 16:57:59 [IPC Parameter Sending Thread #0]: DEBUG ipc.Client: IPC Client (1997160570) connection to rhes564/50.140.197.217:9000 from hduser sending #2715/11/30 
16:57:59 [IPC Client (1997160570) connection to rhes564/50.140.197.217:9000 from hduser]: DEBUG ipc.Client: IPC Client (1997160570) connection to rhes564/50.140.197.217:9000 from hduser got value #2715/11/30 16:57:59 [main]: DEBUG ipc.ProtobufRpcEngine: Call: getFileInfo took 1ms15/11/30 16:57:59 [main]: DEBUG lazy.LazySimpleSerDe: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe initialized with: columnNames=[_col0] columnTypes=[bigint] separator=[[B@170807fb] nullstring=\N lastColumnTakesRest=false timestampFormats=null15/11/30 16:57:59 [main]: DEBUG lazy.LazySimpleSerDe: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe initialized with: columnNames=[_col0] columnTypes=[bigint] separator=[[B@6e30562b] nullstring=\N lastColumnTakesRest=false timestampFormats=null15/11/30 16:57:59 [main]: INFO parse.CalcitePlanner: Set stats collection dir : hdfs://rhes564:9000/tmp/hive/hduser/dec45671-0b5f-4496-8b3a-66f097d0c6f0/hive_2015-11-30_16-57-58_805_2366423616530225987-1/-mr-10000/.hive-staging_hive_2015-11-30_16-57-58_805_2366423616530225987-1/-ext-1000215/11/30 16:57:59 [main]: DEBUG parse.CalcitePlanner: Created FileSink Plan for clause: insclause-0dest_path: hdfs://rhes564:9000/tmp/hive/hduser/dec45671-0b5f-4496-8b3a-66f097d0c6f0/hive_2015-11-30_16-57-58_805_2366423616530225987-1/-mr-10000 row schema: null{(_c0,_col0: bigint)}15/11/30 16:57:59 [main]: DEBUG parse.CalcitePlanner: Created Body Plan for Query Block null15/11/30 16:57:59 [main]: DEBUG parse.CalcitePlanner: Created Plan for Query Block null15/11/30 16:57:59 [main]: DEBUG parse.CalcitePlanner: Before logical optimizationTS[0]-SEL[1]-GBY[2]-RS[3]-GBY[4]-SEL[5]-FS[6]15/11/30 16:57:59 [main]: DEBUG optimizer.ConstantPropagateProcCtx: Getting constants of op:TS[0] with rs:(owner: varchar(30)|{t}owner,object_name: varchar(30)|{t}object_name,subobject_name: varchar(30)|{t}subobject_name,object_id: bigint|{t}object_id,data_object_id: bigint|{t}data_object_id,object_type: varchar(19)|{t}object_type,created: timestamp|{t}created,last_ddl_time: timestamp|{t}last_ddl_time,timestamp: varchar(19)|{t}timestamp,status: varchar(7)|{t}status,temporary2: varchar(1)|{t}temporary2,generated: varchar(1)|{t}generated,secondary: varchar(1)|{t}secondary,namespace: bigint|{t}namespace,edition_name: varchar(30)|{t}edition_name,padding1: varchar(4000)|{t}padding1,padding2: varchar(3500)|{t}padding2,attribute: varchar(32)|{t}attribute,op_type: int|{t}op_type,op_time: timestamp|{t}op_time,BLOCK__OFFSET__INSIDE__FILE: bigint|{t}block__offset__inside__file,INPUT__FILE__NAME: string|{t}input__file__name,ROW__ID: struct<transactionid:bigint,bucketid:int,rowid:bigint>|{t}row__id)15/11/30 16:57:59 [main]: DEBUG optimizer.ConstantPropagateProcCtx: Offerring constants [] to operator TS[0]15/11/30 16:57:59 [main]: DEBUG optimizer.ConstantPropagateProcCtx: Getting constants of op:SEL[1] with rs:(owner: varchar(30)|{t}owner,object_name: varchar(30)|{t}object_name,subobject_name: varchar(30)|{t}subobject_name,object_id: bigint|{t}object_id,data_object_id: bigint|{t}data_object_id,object_type: varchar(19)|{t}object_type,created: timestamp|{t}created,last_ddl_time: timestamp|{t}last_ddl_time,timestamp: varchar(19)|{t}timestamp,status: varchar(7)|{t}status,temporary2: varchar(1)|{t}temporary2,generated: varchar(1)|{t}generated,secondary: varchar(1)|{t}secondary,namespace: bigint|{t}namespace,edition_name: varchar(30)|{t}edition_name,padding1: varchar(4000)|{t}padding1,padding2: varchar(3500)|{t}padding2,attribute: varchar(32)|{t}attribute,op_type: int|{t}op_type,op_time: 
timestamp|{t}op_time,BLOCK__OFFSET__INSIDE__FILE: bigint|{t}block__offset__inside__file,INPUT__FILE__NAME: string|{t}input__file__name,ROW__ID: struct<transactionid:bigint,bucketid:int,rowid:bigint>|{t}row__id)15/11/30 16:57:59 [main]: DEBUG optimizer.ConstantPropagateProcCtx: Offerring constants [] to operator SEL[1]15/11/30 16:57:59 [main]: DEBUG optimizer.ConstantPropagateProcFactory: New column list:(Column[owner] Column[object_name] Column[subobject_name] Column[object_id] Column[data_object_id] Column[object_type] Column[created] Column[last_ddl_time] Column[timestamp] Column[status] Column[temporary2] Column[generated] Column[secondary] Column[namespace] Column[edition_name] Column[padding1] Column[padding2] Column[attribute] Column[op_type] Column[op_time] Column[BLOCK__OFFSET__INSIDE__FILE] Column[INPUT__FILE__NAME] Column[ROW__ID])15/11/30 16:57:59 [main]: DEBUG optimizer.ConstantPropagateProcCtx: Getting constants of op:GBY[2] with rs:(_col0: bigint|{}(tok_function count 1))15/11/30 16:57:59 [main]: DEBUG optimizer.ConstantPropagateProcCtx: Offerring constants [] to operator GBY[2]15/11/30 16:57:59 [main]: DEBUG optimizer.ConstantPropagateProcCtx: Getting constants of op:RS[3] with rs:(VALUE._col0: bigint|{}(tok_function count 1))15/11/30 16:57:59 [main]: DEBUG optimizer.ConstantPropagateProcCtx: Offerring constants [] to operator RS[3]15/11/30 16:57:59 [main]: DEBUG optimizer.ConstantPropagateProcCtx: Getting constants of op:GBY[4] with rs:(_col0: bigint|{}(tok_function count 1))15/11/30 16:57:59 [main]: DEBUG optimizer.ConstantPropagateProcCtx: Offerring constants [] to operator GBY[4]15/11/30 16:57:59 [main]: DEBUG optimizer.ConstantPropagateProcCtx: Getting constants of op:SEL[5] with rs:(_col0: bigint|{null}_c0)15/11/30 16:57:59 [main]: DEBUG optimizer.ConstantPropagateProcCtx: Offerring constants [] to operator SEL[5]15/11/30 16:57:59 [main]: DEBUG optimizer.ConstantPropagateProcFactory: New column list:(Column[_col0])15/11/30 16:57:59 [main]: DEBUG optimizer.ConstantPropagateProcCtx: Getting constants of op:FS[6] with rs:(_col0: bigint|{})15/11/30 16:57:59 [main]: DEBUG optimizer.ConstantPropagateProcCtx: Offerring constants [] to operator FS[6]15/11/30 16:57:59 [main]: INFO ppd.OpProcFactory: Processing for FS(6)15/11/30 16:57:59 [main]: INFO ppd.OpProcFactory: Processing for SEL(5)15/11/30 16:57:59 [main]: INFO ppd.OpProcFactory: Processing for GBY(4)15/11/30 16:57:59 [main]: INFO ppd.OpProcFactory: Processing for RS(3)15/11/30 16:57:59 [main]: INFO ppd.OpProcFactory: Processing for GBY(2)15/11/30 16:57:59 [main]: INFO ppd.OpProcFactory: Processing for SEL(1)15/11/30 16:57:59 [main]: INFO ppd.OpProcFactory: Processing for TS(0)15/11/30 16:57:59 [main]: DEBUG ppd.PredicatePushDown: After PPD:TS[0]-SEL[1]-GBY[2]-RS[3]-GBY[4]-SEL[5]-FS[6]15/11/30 16:57:59 [main]: DEBUG optimizer.ConstantPropagateProcCtx: Getting constants of op:TS[0] with rs:(owner: varchar(30)|{t}owner,object_name: varchar(30)|{t}object_name,subobject_name: varchar(30)|{t}subobject_name,object_id: bigint|{t}object_id,data_object_id: bigint|{t}data_object_id,object_type: varchar(19)|{t}object_type,created: timestamp|{t}created,last_ddl_time: timestamp|{t}last_ddl_time,timestamp: varchar(19)|{t}timestamp,status: varchar(7)|{t}status,temporary2: varchar(1)|{t}temporary2,generated: varchar(1)|{t}generated,secondary: varchar(1)|{t}secondary,namespace: bigint|{t}namespace,edition_name: varchar(30)|{t}edition_name,padding1: varchar(4000)|{t}padding1,padding2: varchar(3500)|{t}padding2,attribute: 
varchar(32)|{t}attribute,op_type: int|{t}op_type,op_time: timestamp|{t}op_time,BLOCK__OFFSET__INSIDE__FILE: bigint|{t}block__offset__inside__file,INPUT__FILE__NAME: string|{t}input__file__name,ROW__ID: struct<transactionid:bigint,bucketid:int,rowid:bigint>|{t}row__id)15/11/30 16:57:59 [main]: DEBUG optimizer.ConstantPropagateProcCtx: Offerring constants [] to operator TS[0]15/11/30 16:57:59 [main]: DEBUG optimizer.ConstantPropagateProcCtx: Getting constants of op:SEL[1] with rs:(owner: varchar(30)|{t}owner,object_name: varchar(30)|{t}object_name,subobject_name: varchar(30)|{t}subobject_name,object_id: bigint|{t}object_id,data_object_id: bigint|{t}data_object_id,object_type: varchar(19)|{t}object_type,created: timestamp|{t}created,last_ddl_time: timestamp|{t}last_ddl_time,timestamp: varchar(19)|{t}timestamp,status: varchar(7)|{t}status,temporary2: varchar(1)|{t}temporary2,generated: varchar(1)|{t}generated,secondary: varchar(1)|{t}secondary,namespace: bigint|{t}namespace,edition_name: varchar(30)|{t}edition_name,padding1: varchar(4000)|{t}padding1,padding2: varchar(3500)|{t}padding2,attribute: varchar(32)|{t}attribute,op_type: int|{t}op_type,op_time: timestamp|{t}op_time,BLOCK__OFFSET__INSIDE__FILE: bigint|{t}block__offset__inside__file,INPUT__FILE__NAME: string|{t}input__file__name,ROW__ID: struct<transactionid:bigint,bucketid:int,rowid:bigint>|{t}row__id)15/11/30 16:57:59 [main]: DEBUG optimizer.ConstantPropagateProcCtx: Offerring constants [] to operator SEL[1]15/11/30 16:57:59 [main]: DEBUG optimizer.ConstantPropagateProcFactory: New column list:(Column[owner] Column[object_name] Column[subobject_name] Column[object_id] Column[data_object_id] Column[object_type] Column[created] Column[last_ddl_time] Column[timestamp] Column[status] Column[temporary2] Column[generated] Column[secondary] Column[namespace] Column[edition_name] Column[padding1] Column[padding2] Column[attribute] Column[op_type] Column[op_time] Column[BLOCK__OFFSET__INSIDE__FILE] Column[INPUT__FILE__NAME] Column[ROW__ID])15/11/30 16:57:59 [main]: DEBUG optimizer.ConstantPropagateProcCtx: Getting constants of op:GBY[2] with rs:(_col0: bigint|{}(tok_function count 1))15/11/30 16:57:59 [main]: DEBUG optimizer.ConstantPropagateProcCtx: Offerring constants [] to operator GBY[2]15/11/30 16:57:59 [main]: DEBUG optimizer.ConstantPropagateProcCtx: Getting constants of op:RS[3] with rs:(VALUE._col0: bigint|{}(tok_function count 1))15/11/30 16:57:59 [main]: DEBUG optimizer.ConstantPropagateProcCtx: Offerring constants [] to operator RS[3]15/11/30 16:57:59 [main]: DEBUG optimizer.ConstantPropagateProcCtx: Getting constants of op:GBY[4] with rs:(_col0: bigint|{}(tok_function count 1))15/11/30 16:57:59 [main]: DEBUG optimizer.ConstantPropagateProcCtx: Offerring constants [] to operator GBY[4]15/11/30 16:57:59 [main]: DEBUG optimizer.ConstantPropagateProcCtx: Getting constants of op:SEL[5] with rs:(_col0: bigint|{null}_c0)15/11/30 16:57:59 [main]: DEBUG optimizer.ConstantPropagateProcCtx: Offerring constants [] to operator SEL[5]15/11/30 16:57:59 [main]: DEBUG optimizer.ConstantPropagateProcFactory: New column list:(Column[_col0])15/11/30 16:57:59 [main]: DEBUG optimizer.ConstantPropagateProcCtx: Getting constants of op:FS[6] with rs:(_col0: bigint|{})15/11/30 16:57:59 [main]: DEBUG optimizer.ConstantPropagateProcCtx: Offerring constants [] to operator FS[6]15/11/30 16:57:59 [main]: DEBUG optimizer.ColumnPrunerProcFactory: Reduce Sink Operator 3 key:[]15/11/30 16:57:59 [main]: INFO optimizer.ColumnPrunerProcFactory: RS 3 oldColExprMap: 
{VALUE._col0=Column[_col0]}15/11/30 16:57:59 [main]: INFO optimizer.ColumnPrunerProcFactory: RS 3 newColExprMap: {VALUE._col0=Column[_col0]}15/11/30 16:57:59 [main]: DEBUG optimizer.IdentityProjectRemover: Identity project remover optimization removed : SEL[5]15/11/30 16:57:59 [main]: INFO log.PerfLogger: <PERFLOG method=partition-retrieving from=org.apache.hadoop.hive.ql.optimizer.ppr.PartitionPruner>15/11/30 16:57:59 [main]: INFO log.PerfLogger: </PERFLOG method=partition-retrieving start=1448902679650 end=1448902679653 duration=3 from=org.apache.hadoop.hive.ql.optimizer.ppr.PartitionPruner>15/11/30 16:57:59 [main]: DEBUG exec.TableScanOperator: Setting stats (Num rows: 1767886 Data size: 67670776 Basic stats: COMPLETE Column stats: COMPLETE) on TS[0]15/11/30 16:57:59 [main]: DEBUG annotation.StatsRulesProcFactory: [0] STATS-TS[0] (t):  numRows: 1767886 dataSize: 67670776 basicStatsState: COMPLETE colStatsState: COMPLETE colStats: {}15/11/30 16:57:59 [main]: DEBUG exec.SelectOperator: Setting stats (Num rows: 1767886 Data size: 67670776 Basic stats: COMPLETE Column stats: COMPLETE) on SEL[1]15/11/30 16:57:59 [main]: DEBUG annotation.StatsRulesProcFactory: [0] STATS-SEL[1]:  numRows: 1767886 dataSize: 67670776 basicStatsState: COMPLETE colStatsState: COMPLETE colStats: {}15/11/30 16:57:59 [main]: DEBUG annotation.StatsRulesProcFactory: STATS-GBY[2]: inputSize: 67670776 maxSplitSize: 256000000 parallelism: 1 containsGroupingSet: false sizeOfGroupingSet: 115/11/30 16:57:59 [main]: DEBUG annotation.StatsRulesProcFactory: STATS-GBY[2] hashAgg: true15/11/30 16:57:59 [main]: DEBUG annotation.StatsRulesProcFactory: [Case 3] STATS-GBY[2]: cardinality: 115/11/30 16:57:59 [main]: DEBUG exec.GroupByOperator: Setting stats (Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE) on GBY[2]15/11/30 16:57:59 [main]: DEBUG annotation.StatsRulesProcFactory: [0] STATS-GBY[2]:  numRows: 1 dataSize: 8 basicStatsState: COMPLETE colStatsState: COMPLETE colStats: {_col0= colName: _col0 colType: bigint countDistincts: 1 numNulls: 0 avgColLen: 8.0 numTrues: 0 numFalses: 0 isPrimaryKey: false}15/11/30 16:57:59 [main]: DEBUG exec.ReduceSinkOperator: Setting stats (Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE) on RS[3]15/11/30 16:57:59 [main]: DEBUG annotation.StatsRulesProcFactory: [0] STATS-RS[3]:  numRows: 1 dataSize: 8 basicStatsState: COMPLETE colStatsState: COMPLETE colStats: {VALUE._col0= colName: VALUE._col0 colType: bigint countDistincts: 1 numNulls: 0 avgColLen: 8.0 numTrues: 0 numFalses: 0 isPrimaryKey: false}15/11/30 16:57:59 [main]: DEBUG annotation.StatsRulesProcFactory: STATS-GBY[4]: inputSize: 1 maxSplitSize: 256000000 parallelism: 1 containsGroupingSet: false sizeOfGroupingSet: 115/11/30 16:57:59 [main]: DEBUG annotation.StatsRulesProcFactory: STATS-GBY[4] hashAgg: false15/11/30 16:57:59 [main]: DEBUG annotation.StatsRulesProcFactory: [Case 9] STATS-GBY[4]: cardinality: 115/11/30 16:57:59 [main]: DEBUG exec.GroupByOperator: Setting stats (Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE) on GBY[4]15/11/30 16:57:59 [main]: DEBUG annotation.StatsRulesProcFactory: [0] STATS-GBY[4]:  numRows: 1 dataSize: 8 basicStatsState: COMPLETE colStatsState: COMPLETE colStats: {_col0= colName: _col0 colType: bigint countDistincts: 1 numNulls: 0 avgColLen: 8.0 numTrues: 0 numFalses: 0 isPrimaryKey: false}15/11/30 16:57:59 [main]: DEBUG annotation.StatsRulesProcFactory: [0] STATS-FS[6]:  numRows: 1 dataSize: 8 basicStatsState: COMPLETE colStatsState: 
COMPLETE colStats: {_col0= colName: _col0 colType: bigint countDistincts: 1 numNulls: 0 avgColLen: 8.0 numTrues: 0 numFalses: 0 isPrimaryKey: false}15/11/30 16:57:59 [IPC Parameter Sending Thread #0]: DEBUG ipc.Client: IPC Client (1997160570) connection to rhes564/50.140.197.217:9000 from hduser sending #2815/11/30 16:57:59 [IPC Client (1997160570) connection to rhes564/50.140.197.217:9000 from hduser]: DEBUG ipc.Client: IPC Client (1997160570) connection to rhes564/50.140.197.217:9000 from hduser got value #2815/11/30 16:57:59 [main]: DEBUG ipc.ProtobufRpcEngine: Call: getListing took 1ms15/11/30 16:57:59 [main]: DEBUG exec.TableScanOperator: Setting traits (org.apache.hadoop.hive.ql.plan.OpTraits@74a19718) on TS[0]15/11/30 16:57:59 [main]: DEBUG exec.SelectOperator: Setting traits (org.apache.hadoop.hive.ql.plan.OpTraits@1cc6e47a) on SEL[1]15/11/30 16:57:59 [main]: DEBUG exec.GroupByOperator: Setting traits (org.apache.hadoop.hive.ql.plan.OpTraits@45c21458) on GBY[2]15/11/30 16:57:59 [main]: DEBUG exec.ReduceSinkOperator: Setting traits (org.apache.hadoop.hive.ql.plan.OpTraits@48c5c12f) on RS[3]15/11/30 16:57:59 [main]: DEBUG exec.GroupByOperator: Setting traits (org.apache.hadoop.hive.ql.plan.OpTraits@2f0e19fc) on GBY[4]15/11/30 16:57:59 [main]: DEBUG exec.FileSinkOperator: Setting traits (org.apache.hadoop.hive.ql.plan.OpTraits@2f0e19fc) on FS[6]15/11/30 16:57:59 [main]: DEBUG parse.CalcitePlanner: After logical optimizationTS[0]-SEL[1]-GBY[2]-RS[3]-GBY[4]-FS[6]15/11/30 16:57:59 [main]: DEBUG security.Groups: Returning cached groups for 'hduser'15/11/30 16:57:59 [main]: DEBUG security.Groups: Returning cached groups for 'hduser'15/11/30 16:57:59 [main]: DEBUG session.SessionState: Session is using authorization class class org.apache.hadoop.hive.ql.security.authorization.DefaultHiveAuthorizationProvider15/11/30 16:57:59 [main]: INFO log.PerfLogger: <PERFLOG method=SparkOptimizeOperatorTree from=org.apache.hadoop.hive.ql.parse.spark.SparkCompiler>15/11/30 16:57:59 [main]: INFO spark.SetSparkReducerParallelism: Number of reducers determined to be: 115/11/30 16:57:59 [main]: INFO log.PerfLogger: </PERFLOG method=SparkOptimizeOperatorTree start=1448902679679 end=1448902679685 duration=6 from=org.apache.hadoop.hive.ql.parse.spark.SparkCompiler>15/11/30 16:57:59 [main]: INFO log.PerfLogger: <PERFLOG method=SparkGenerateTaskTree from=org.apache.hadoop.hive.ql.parse.spark.SparkCompiler>15/11/30 16:57:59 [main]: DEBUG spark.GenSparkWork: Root operator: TS[0]15/11/30 16:57:59 [main]: DEBUG spark.GenSparkWork: Leaf operator: RS[3]15/11/30 16:57:59 [main]: DEBUG spark.GenSparkUtils: Adding map work (Map 1) for TS[0]15/11/30 16:57:59 [main]: DEBUG hive.log: DDL: struct t { varchar(30) owner, varchar(30) object_name, varchar(30) subobject_name, i64 object_id, i64 data_object_id, varchar(19) object_type, timestamp created, timestamp last_ddl_time, varchar(19) timestamp, varchar(7) status, varchar(1) temporary2, varchar(1) generated, varchar(1) secondary, i64 namespace, varchar(30) edition_name, varchar(4000) padding1, varchar(3500) padding2, varchar(32) attribute, i32 op_type, timestamp op_time}15/11/30 16:57:59 [main]: DEBUG hive.log: DDL: struct t { varchar(30) owner, varchar(30) object_name, varchar(30) subobject_name, i64 object_id, i64 data_object_id, varchar(19) object_type, timestamp created, timestamp last_ddl_time, varchar(19) timestamp, varchar(7) status, varchar(1) temporary2, varchar(1) generated, varchar(1) secondary, i64 namespace, varchar(30) edition_name, varchar(4000) padding1, 
varchar(3500) padding2, varchar(32) attribute, i32 op_type, timestamp op_time}15/11/30 16:57:59 [main]: DEBUG hive.log: DDL: struct t { varchar(30) owner, varchar(30) object_name, varchar(30) subobject_name, i64 object_id, i64 data_object_id, varchar(19) object_type, timestamp created, timestamp last_ddl_time, varchar(19) timestamp, varchar(7) status, varchar(1) temporary2, varchar(1) generated, varchar(1) secondary, i64 namespace, varchar(30) edition_name, varchar(4000) padding1, varchar(3500) padding2, varchar(32) attribute, i32 op_type, timestamp op_time}15/11/30 16:57:59 [main]: DEBUG optimizer.GenMapRedUtils: Adding hdfs://rhes564:9000/user/hive/warehouse/asehadoop.db/t of tablet15/11/30 16:57:59 [main]: DEBUG optimizer.GenMapRedUtils: Information added for path hdfs://rhes564:9000/user/hive/warehouse/asehadoop.db/t15/11/30 16:57:59 [main]: DEBUG spark.GenSparkWork: First pass. Leaf operator: RS[3]15/11/30 16:57:59 [main]: DEBUG spark.GenSparkWork: Root operator: GBY[4]15/11/30 16:57:59 [main]: DEBUG spark.GenSparkWork: Leaf operator: FS[6]15/11/30 16:57:59 [main]: DEBUG spark.GenSparkUtils: Adding reduce work (Reducer 2) for GBY[4]15/11/30 16:57:59 [main]: DEBUG spark.GenSparkUtils: Setting up reduce sink: RS[3] with following reduce work: Reducer 215/11/30 16:57:59 [main]: DEBUG spark.GenSparkWork: Removing RS[3] as parent from GBY[4]15/11/30 16:57:59 [main]: DEBUG spark.GenSparkWork: First pass. Leaf operator: FS[6]15/11/30 16:57:59 [main]: INFO log.PerfLogger: </PERFLOG method=SparkGenerateTaskTree start=1448902679686 end=1448902679703 duration=17 from=org.apache.hadoop.hive.ql.parse.spark.SparkCompiler>15/11/30 16:57:59 [main]: INFO log.PerfLogger: <PERFLOG method=SparkOptimizeTaskTree from=org.apache.hadoop.hive.ql.parse.spark.SparkCompiler>15/11/30 16:57:59 [main]: DEBUG parse.TaskCompiler: Skipping runtime skew join optimization15/11/30 16:57:59 [main]: DEBUG physical.NullScanTaskDispatcher: Looking at: Map 115/11/30 16:57:59 [main]: INFO physical.NullScanTaskDispatcher: Looking for table scans where optimization is applicable15/11/30 16:57:59 [main]: INFO physical.NullScanTaskDispatcher: Found 0 null table scans15/11/30 16:57:59 [main]: DEBUG physical.NullScanTaskDispatcher: Looking at: Map 115/11/30 16:57:59 [main]: INFO physical.NullScanTaskDispatcher: Looking for table scans where optimization is applicable15/11/30 16:57:59 [main]: INFO physical.NullScanTaskDispatcher: Found 0 null table scans15/11/30 16:57:59 [main]: DEBUG physical.NullScanTaskDispatcher: Looking at: Map 115/11/30 16:57:59 [main]: INFO physical.NullScanTaskDispatcher: Looking for table scans where optimization is applicable15/11/30 16:57:59 [main]: INFO physical.NullScanTaskDispatcher: Found 0 null table scans15/11/30 16:57:59 [main]: DEBUG parse.TaskCompiler: Skipping vectorization15/11/30 16:57:59 [main]: DEBUG parse.TaskCompiler: Skipping stage id rearranger15/11/30 16:57:59 [main]: INFO log.PerfLogger: </PERFLOG method=SparkOptimizeTaskTree start=1448902679704 end=1448902679718 duration=14 from=org.apache.hadoop.hive.ql.parse.spark.SparkCompiler>15/11/30 16:57:59 [main]: INFO parse.CalcitePlanner: Completed plan generation15/11/30 16:57:59 [main]: INFO ql.Driver: Semantic Analysis Completed15/11/30 16:57:59 [main]: DEBUG parse.CalcitePlanner: validation start15/11/30 16:57:59 [main]: DEBUG parse.CalcitePlanner: not validating writeEntity, because entity is neither table nor partition15/11/30 16:57:59 [main]: INFO log.PerfLogger: </PERFLOG method=semanticAnalyze start=1448902678816 end=1448902679719 
duration=903 from=org.apache.hadoop.hive.ql.Driver>15/11/30 16:57:59 [main]: DEBUG lazy.LazySimpleSerDe: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe initialized with: columnNames=[_col0] columnTypes=[bigint] separator=[[B@51e08af7] nullstring=\N lastColumnTakesRest=false timestampFormats=null15/11/30 16:57:59 [main]: INFO exec.ListSinkOperator: Initializing operator OP[7]15/11/30 16:57:59 [main]: DEBUG lazy.LazySimpleSerDe: org.apache.hadoop.hive.serde2.DelimitedJSONSerDe initialized with: columnNames=[] columnTypes=[] separator=[[B@7bf2b1dc] nullstring=NULL lastColumnTakesRest=false timestampFormats=null15/11/30 16:57:59 [main]: INFO exec.ListSinkOperator: Initialization Done 7 OP15/11/30 16:57:59 [main]: INFO exec.ListSinkOperator: Operator 7 OP initialized15/11/30 16:57:59 [main]: INFO ql.Driver: Returning Hive schema: Schema(fieldSchemas:[FieldSchema(name:_c0, type:bigint, comment:null)], properties:null)15/11/30 16:57:59 [main]: INFO log.PerfLogger: </PERFLOG method=compile start=1448902678804 end=1448902679762 duration=958 from=org.apache.hadoop.hive.ql.Driver>15/11/30 16:57:59 [main]: INFO metadata.Hive: Dumping metastore api call timing information for : compilation phase15/11/30 16:57:59 [main]: DEBUG metadata.Hive: Total time spent in each metastore function (ms): {isCompatibleWith_(HiveConf, )=1, getTable_(String, String, )=34}15/11/30 16:57:59 [main]: INFO ql.Driver: Concurrency mode is disabled, not creating a lock manager15/11/30 16:57:59 [main]: INFO log.PerfLogger: <PERFLOG method=Driver.execute from=org.apache.hadoop.hive.ql.Driver>15/11/30 16:57:59 [main]: INFO ql.Driver: Starting command(queryId=hduser_20151130165758_727c5878-4db9-4a29-9416-61a394ac2773):select count(1) from tQuery ID = hduser_20151130165758_727c5878-4db9-4a29-9416-61a394ac277315/11/30 16:57:59 [main]: INFO ql.Driver: Query ID = hduser_20151130165758_727c5878-4db9-4a29-9416-61a394ac2773Total jobs = 115/11/30 16:57:59 [main]: INFO ql.Driver: Total jobs = 115/11/30 16:57:59 [main]: INFO log.PerfLogger: </PERFLOG method=TimeToSubmit start=1448902678804 end=1448902679763 duration=959 from=org.apache.hadoop.hive.ql.Driver>15/11/30 16:57:59 [main]: INFO log.PerfLogger: <PERFLOG method=runTasks from=org.apache.hadoop.hive.ql.Driver>15/11/30 16:57:59 [main]: INFO log.PerfLogger: <PERFLOG method=task.SPARK.Stage-1 from=org.apache.hadoop.hive.ql.Driver>Launching Job 1 out of 115/11/30 16:57:59 [main]: INFO ql.Driver: Launching Job 1 out of 115/11/30 16:57:59 [main]: INFO ql.Driver: Starting task [Stage-1:MAPRED] in serial modeIn order to change the average load for a reducer (in bytes):15/11/30 16:57:59 [main]: INFO spark.SparkTask: In order to change the average load for a reducer (in bytes):  set hive.exec.reducers.bytes.per.reducer=<number>15/11/30 16:57:59 [main]: INFO spark.SparkTask:   set hive.exec.reducers.bytes.per.reducer=<number>In order to limit the maximum number of reducers:15/11/30 16:57:59 [main]: INFO spark.SparkTask: In order to limit the maximum number of reducers:  set hive.exec.reducers.max=<number>15/11/30 16:57:59 [main]: INFO spark.SparkTask:   set hive.exec.reducers.max=<number>In order to set a constant number of reducers:15/11/30 16:57:59 [main]: INFO spark.SparkTask: In order to set a constant number of reducers:  set mapreduce.job.reduces=<number>15/11/30 16:57:59 [main]: INFO spark.SparkTask:   set mapreduce.job.reduces=<number>15/11/30 16:57:59 [main]: INFO session.SparkSessionManagerImpl: Setting up the session manager.15/11/30 16:57:59 [main]: INFO 
spark.HiveSparkClientFactory: load RPC property from hive configuration (hive.spark.client.connect.timeout -> 1000).15/11/30 16:57:59 [main]: INFO spark.HiveSparkClientFactory: load spark property from hive configuration (spark.serializer -> org.apache.spark.serializer.KryoSerializer).15/11/30 16:57:59 [main]: INFO spark.HiveSparkClientFactory: load spark property from hive configuration (spark.eventLog.enabled -> true).15/11/30 16:57:59 [main]: INFO spark.HiveSparkClientFactory: load spark property from hive configuration (spark.eventLog.dir -> /usr/lib/spark/logs).15/11/30 16:57:59 [main]: INFO spark.HiveSparkClientFactory: load RPC property from hive configuration (hive.spark.client.rpc.threads -> 8).15/11/30 16:57:59 [main]: INFO spark.HiveSparkClientFactory: load RPC property from hive configuration (hive.spark.client.secret.bits -> 256).15/11/30 16:57:59 [main]: INFO spark.HiveSparkClientFactory: load spark property from hive configuration (spark.home -> /usr/lib/spark).15/11/30 16:57:59 [main]: INFO spark.HiveSparkClientFactory: load RPC property from hive configuration (hive.spark.client.rpc.max.size -> 52428800).15/11/30 16:57:59 [main]: INFO spark.HiveSparkClientFactory: load spark property from hive configuration (spark.master -> spark://rhes564:7077).15/11/30 16:57:59 [main]: INFO spark.HiveSparkClientFactory: load spark property from hive configuration (spark.executor.memory -> 512m).15/11/30 16:57:59 [main]: INFO spark.HiveSparkClientFactory: load RPC property from hive configuration (hive.spark.client.server.connect.timeout -> 90000).15/11/30 16:57:59 [main]: DEBUG logging.InternalLoggerFactory: Using SLF4J as the default logging framework15/11/30 16:57:59 [main]: DEBUG channel.MultithreadEventLoopGroup: -Dio.netty.eventLoopThreads: 2415/11/30 16:57:59 [main]: DEBUG internal.PlatformDependent0: java.nio.Buffer.address: available15/11/30 16:57:59 [main]: DEBUG internal.PlatformDependent0: sun.misc.Unsafe.theUnsafe: available15/11/30 16:57:59 [main]: DEBUG internal.PlatformDependent0: sun.misc.Unsafe.copyMemory: available15/11/30 16:57:59 [main]: DEBUG internal.PlatformDependent0: java.nio.Bits.unaligned: true15/11/30 16:57:59 [main]: DEBUG internal.PlatformDependent: Java version: 715/11/30 16:57:59 [main]: DEBUG internal.PlatformDependent: -Dio.netty.noUnsafe: false15/11/30 16:57:59 [main]: DEBUG internal.PlatformDependent: sun.misc.Unsafe: available15/11/30 16:57:59 [main]: DEBUG internal.PlatformDependent: -Dio.netty.noJavassist: false15/11/30 16:57:59 [main]: DEBUG internal.PlatformDependent: Javassist: unavailable15/11/30 16:57:59 [main]: DEBUG internal.PlatformDependent: You don't have Javassist in your class path or you don't have enough permission to load dynamically generated classes.  
Please check the configuration for better performance.15/11/30 16:57:59 [main]: DEBUG internal.PlatformDependent: -Dio.netty.tmpdir: /tmp (java.io.tmpdir)15/11/30 16:57:59 [main]: DEBUG internal.PlatformDependent: -Dio.netty.bitMode: 64 (sun.arch.data.model)15/11/30 16:57:59 [main]: DEBUG internal.PlatformDependent: -Dio.netty.noPreferDirect: false15/11/30 16:57:59 [main]: DEBUG nio.NioEventLoop: -Dio.netty.noKeySetOptimization: false15/11/30 16:57:59 [main]: DEBUG nio.NioEventLoop: -Dio.netty.selectorAutoRebuildThreshold: 51215/11/30 16:57:59 [main]: DEBUG internal.ThreadLocalRandom: -Dio.netty.initialSeedUniquifier: 0xd8ebcbdc152b0b7b (took 0 ms)15/11/30 16:57:59 [main]: DEBUG buffer.ByteBufUtil: -Dio.netty.allocator.type: unpooled15/11/30 16:57:59 [main]: DEBUG buffer.ByteBufUtil: -Dio.netty.threadLocalDirectBufferSize: 6553615/11/30 16:57:59 [main]: DEBUG util.NetUtil: Loopback interface: lo (lo, 127.0.0.1)15/11/30 16:57:59 [main]: DEBUG util.NetUtil: /proc/sys/net/core/somaxconn: 12815/11/30 16:57:59 [main]: WARN rpc.RpcConfiguration: Your hostname, rhes564, resolves to a loopback address, but we couldn't find  any external IP address!15/11/30 16:57:59 [main]: WARN rpc.RpcConfiguration: Set hive.spark.client.server.address if you need to bind to another address.15/11/30 16:57:59 [main]: INFO spark.HiveSparkClientFactory: load RPC property from hive configuration (hive.spark.client.connect.timeout -> 1000).15/11/30 16:57:59 [main]: INFO spark.HiveSparkClientFactory: load spark property from hive configuration (spark.serializer -> org.apache.spark.serializer.KryoSerializer).15/11/30 16:57:59 [main]: INFO spark.HiveSparkClientFactory: load spark property from hive configuration (spark.eventLog.enabled -> true).15/11/30 16:57:59 [main]: INFO spark.HiveSparkClientFactory: load spark property from hive configuration (spark.eventLog.dir -> /usr/lib/spark/logs).15/11/30 16:57:59 [main]: INFO spark.HiveSparkClientFactory: load RPC property from hive configuration (hive.spark.client.rpc.threads -> 8).15/11/30 16:57:59 [main]: INFO spark.HiveSparkClientFactory: load RPC property from hive configuration (hive.spark.client.secret.bits -> 256).15/11/30 16:57:59 [main]: INFO spark.HiveSparkClientFactory: load spark property from hive configuration (spark.home -> /usr/lib/spark).15/11/30 16:57:59 [main]: INFO spark.HiveSparkClientFactory: load RPC property from hive configuration (hive.spark.client.rpc.max.size -> 52428800).15/11/30 16:57:59 [main]: INFO spark.HiveSparkClientFactory: load spark property from hive configuration (spark.master -> spark://rhes564:7077).15/11/30 16:57:59 [main]: INFO spark.HiveSparkClientFactory: load spark property from hive configuration (spark.executor.memory -> 512m).15/11/30 16:57:59 [main]: INFO spark.HiveSparkClientFactory: load RPC property from hive configuration (hive.spark.client.server.connect.timeout -> 90000).15/11/30 16:58:00 [main]: INFO client.SparkClientImpl: Running client driver with argv: /usr/lib/spark/bin/spark-submit --properties-file /tmp/spark-submit.9131386023383484888.properties --class org.apache.hive.spark.client.RemoteDriver /usr/lib/hive/lib/hive-exec-1.2.1.jar --remote-host rhes564 --remote-port 26896 --conf hive.spark.client.connect.timeout=1000 --conf hive.spark.client.server.connect.timeout=90000 --conf hive.spark.client.channel.log.level=null --conf hive.spark.client.rpc.max.size=52428800 --conf hive.spark.client.rpc.threads=8 --conf hive.spark.client.secret.bits=25615/11/30 16:58:00 [stderr-redir-1]: INFO client.SparkClientImpl: 
Warning: Ignoring non-spark config property: hive.spark.client.server.connect.timeout=9000015/11/30 16:58:00 [stderr-redir-1]: INFO client.SparkClientImpl: Warning: Ignoring non-spark config property: hive.spark.client.rpc.threads=815/11/30 16:58:00 [stderr-redir-1]: INFO client.SparkClientImpl: Warning: Ignoring non-spark config property: hive.spark.client.connect.timeout=100015/11/30 16:58:00 [stderr-redir-1]: INFO client.SparkClientImpl: Warning: Ignoring non-spark config property: hive.spark.client.secret.bits=25615/11/30 16:58:00 [stderr-redir-1]: INFO client.SparkClientImpl: Warning: Ignoring non-spark config property: hive.spark.client.rpc.max.size=5242880015/11/30 16:58:00 [stderr-redir-1]: INFO client.SparkClientImpl: 15/11/30 16:58:00 INFO client.RemoteDriver: Connecting to: rhes564:2689615/11/30 16:58:01 [stderr-redir-1]: INFO client.SparkClientImpl: 15/11/30 16:58:01 WARN rpc.Rpc: Invalid log level null, reverting to default.15/11/30 16:58:01 [RPC-Handler-3]: DEBUG util.ResourceLeakDetector: -Dio.netty.leakDetectionLevel: simple15/11/30 16:58:01 [RPC-Handler-3]: DEBUG util.Recycler: -Dio.netty.recycler.maxCapacity.default: 26214415/11/30 16:58:01 [RPC-Handler-3]: DEBUG rpc.KryoMessageCodec: Decoded message of type org.apache.hive.spark.client.rpc.Rpc$SaslMessage (41 bytes)15/11/30 16:58:01 [RPC-Handler-3]: DEBUG internal.Cleaner0: java.nio.ByteBuffer.cleaner(): available15/11/30 16:58:01 [RPC-Handler-3]: DEBUG rpc.RpcServer$SaslServerHandler: Handling SASL challenge message...15/11/30 16:58:01 [RPC-Handler-3]: DEBUG rpc.RpcServer$SaslServerHandler: Sending SASL challenge response...15/11/30 16:58:01 [RPC-Handler-3]: DEBUG rpc.KryoMessageCodec: Encoded message of type org.apache.hive.spark.client.rpc.Rpc$SaslMessage (98 bytes)15/11/30 16:58:01 [RPC-Handler-3]: DEBUG rpc.KryoMessageCodec: Decoded message of type org.apache.hive.spark.client.rpc.Rpc$SaslMessage (275 bytes)15/11/30 16:58:01 [RPC-Handler-3]: DEBUG rpc.RpcServer$SaslServerHandler: Handling SASL challenge message...15/11/30 16:58:01 [RPC-Handler-3]: DEBUG rpc.RpcServer$SaslServerHandler: Sending SASL challenge response...15/11/30 16:58:01 [RPC-Handler-3]: DEBUG rpc.KryoMessageCodec: Encoded message of type org.apache.hive.spark.client.rpc.Rpc$SaslMessage (45 bytes)15/11/30 16:58:01 [RPC-Handler-3]: DEBUG rpc.RpcServer$SaslServerHandler: SASL negotiation finished with QOP auth.15/11/30 16:58:01 [main]: DEBUG session.SparkSessionManagerImpl: New session (30d627c3-a88d-4126-b616-944a6b7d327b) is created.15/11/30 16:58:01 [main]: INFO log.PerfLogger: <PERFLOG method=SparkSubmitJob from=org.apache.hadoop.hive.ql.exec.spark.SparkTask>15/11/30 16:58:01 [main]: DEBUG rpc.RpcDispatcher: [ClientProtocol] Registered outstanding rpc 0 (org.apache.hive.spark.client.BaseProtocol$SyncJobRequest).15/11/30 16:58:01 [RPC-Handler-3]: DEBUG rpc.KryoMessageCodec: Encoded message of type org.apache.hive.spark.client.rpc.Rpc$MessageHeader (5 bytes)15/11/30 16:58:01 [stderr-redir-1]: INFO client.SparkClientImpl: 15/11/30 16:58:01 INFO spark.SparkContext: Running Spark version 1.5.215/11/30 16:58:01 [RPC-Handler-3]: DEBUG rpc.KryoMessageCodec: Encoded message of type org.apache.hive.spark.client.BaseProtocol$SyncJobRequest (159 bytes)15/11/30 16:58:01 [main]: INFO ql.Context: New scratch dir is hdfs://rhes564:9000/tmp/hive/hduser/dec45671-0b5f-4496-8b3a-66f097d0c6f0/hive_2015-11-30_16-57-58_805_2366423616530225987-115/11/30 16:58:01 [main]: DEBUG hdfs.DFSClient: 
/tmp/hive/hduser/dec45671-0b5f-4496-8b3a-66f097d0c6f0/hive_2015-11-30_16-57-58_805_2366423616530225987-1/-mr-10003: masked=rwxr-xr-x15/11/30 16:58:01 [IPC Parameter Sending Thread #0]: DEBUG ipc.Client: IPC Client (1997160570) connection to rhes564/50.140.197.217:9000 from hduser sending #2915/11/30 16:58:01 [IPC Client (1997160570) connection to rhes564/50.140.197.217:9000 from hduser]: DEBUG ipc.Client: IPC Client (1997160570) connection to rhes564/50.140.197.217:9000 from hduser got value #2915/11/30 16:58:01 [main]: DEBUG ipc.ProtobufRpcEngine: Call: mkdirs took 3ms15/11/30 16:58:02 [main]: DEBUG rpc.RpcDispatcher: [ClientProtocol] Registered outstanding rpc 1 (org.apache.hive.spark.client.BaseProtocol$JobRequest).15/11/30 16:58:02 [main]: DEBUG client.SparkClientImpl: Send JobRequest[3d26e448-e721-49db-a667-298b335b3b7a].15/11/30 16:58:02 [RPC-Handler-3]: DEBUG rpc.KryoMessageCodec: Encoded message of type org.apache.hive.spark.client.rpc.Rpc$MessageHeader (5 bytes)15/11/30 16:58:02 [RPC-Handler-3]: DEBUG rpc.KryoMessageCodec: Encoded message of type org.apache.hive.spark.client.BaseProtocol$JobRequest (126802 bytes)15/11/30 16:58:02 [main]: INFO log.PerfLogger: </PERFLOG method=SparkSubmitJob start=1448902681932 end=1448902682056 duration=124 from=org.apache.hadoop.hive.ql.exec.spark.SparkTask>Starting Spark Job = 3d26e448-e721-49db-a667-298b335b3b7a15/11/30 16:58:02 [main]: INFO spark.SparkTask: Starting Spark Job = 3d26e448-e721-49db-a667-298b335b3b7a15/11/30 16:58:02 [main]: INFO log.PerfLogger: <PERFLOG method=SparkRunJob from=org.apache.hadoop.hive.ql.exec.spark.status.SparkJobMonitor>15/11/30 16:58:02 [main]: INFO log.PerfLogger: <PERFLOG method=SparkSubmitToRunning from=org.apache.hadoop.hive.ql.exec.spark.status.SparkJobMonitor>state = SENT15/11/30 16:58:02 [main]: INFO status.SparkJobMonitor: state = SENT15/11/30 16:58:02 [stderr-redir-1]: INFO client.SparkClientImpl: 15/11/30 16:58:02 WARN util.NativeCodeLoader: Unable to load native-hadoop library for your platform... 
using builtin-java classes where applicable15/11/30 16:58:02 [stderr-redir-1]: INFO client.SparkClientImpl: 15/11/30 16:58:02 INFO spark.SecurityManager: Changing view acls to: hduser15/11/30 16:58:02 [stderr-redir-1]: INFO client.SparkClientImpl: 15/11/30 16:58:02 INFO spark.SecurityManager: Changing modify acls to: hduser15/11/30 16:58:02 [stderr-redir-1]: INFO client.SparkClientImpl: 15/11/30 16:58:02 INFO spark.SecurityManager: SecurityManager: authentication disabled; ui acls disabled; users with view permissions: Set(hduser); users with modify permissions: Set(hduser)15/11/30 16:58:02 [stderr-redir-1]: INFO client.SparkClientImpl: 15/11/30 16:58:02 INFO slf4j.Slf4jLogger: Slf4jLogger started15/11/30 16:58:02 [stderr-redir-1]: INFO client.SparkClientImpl: 15/11/30 16:58:02 INFO Remoting: Starting remoting15/11/30 16:58:02 [stderr-redir-1]: INFO client.SparkClientImpl: 15/11/30 16:58:02 INFO Remoting: Remoting started; listening on addresses :[akka.tcp://sparkDriver@50.140.197.217:25977]15/11/30 16:58:02 [stderr-redir-1]: INFO client.SparkClientImpl: 15/11/30 16:58:02 INFO util.Utils: Successfully started service 'sparkDriver' on port 25977.15/11/30 16:58:02 [stderr-redir-1]: INFO client.SparkClientImpl: 15/11/30 16:58:02 INFO spark.SparkEnv: Registering MapOutputTracker15/11/30 16:58:02 [stderr-redir-1]: INFO client.SparkClientImpl: 15/11/30 16:58:02 INFO spark.SparkEnv: Registering BlockManagerMaster15/11/30 16:58:02 [stderr-redir-1]: INFO client.SparkClientImpl: 15/11/30 16:58:02 INFO storage.DiskBlockManager: Created local directory at /tmp/blockmgr-9cb5b2ee-6388-4a3a-b661-92fc64abdd4615/11/30 16:58:02 [stderr-redir-1]: INFO client.SparkClientImpl: 15/11/30 16:58:02 INFO storage.MemoryStore: MemoryStore started with capacity 529.9 MB15/11/30 16:58:02 [stderr-redir-1]: INFO client.SparkClientImpl: 15/11/30 16:58:02 INFO spark.HttpFileServer: HTTP File server directory is /tmp/spark-4a493a0a-746b-4db8-afc4-905a5926999a/httpd-302ea1fd-6573-4192-8907-6657a24e5e2215/11/30 16:58:02 [stderr-redir-1]: INFO client.SparkClientImpl: 15/11/30 16:58:02 INFO spark.HttpServer: Starting HTTP Server15/11/30 16:58:03 [stderr-redir-1]: INFO client.SparkClientImpl: 15/11/30 16:58:03 INFO server.Server: jetty-8.y.z-SNAPSHOT15/11/30 16:58:03 [stderr-redir-1]: INFO client.SparkClientImpl: 15/11/30 16:58:03 INFO server.AbstractConnector: Started SocketConnector@0.0.0.0:3980115/11/30 16:58:03 [stderr-redir-1]: INFO client.SparkClientImpl: 15/11/30 16:58:03 INFO util.Utils: Successfully started service 'HTTP file server' on port 39801.15/11/30 16:58:03 [stderr-redir-1]: INFO client.SparkClientImpl: 15/11/30 16:58:03 INFO spark.SparkEnv: Registering OutputCommitCoordinatorstate = SENT15/11/30 16:58:03 [main]: INFO status.SparkJobMonitor: state = SENT15/11/30 16:58:03 [stderr-redir-1]: INFO client.SparkClientImpl: 15/11/30 16:58:03 INFO server.Server: jetty-8.y.z-SNAPSHOT15/11/30 16:58:03 [stderr-redir-1]: INFO client.SparkClientImpl: 15/11/30 16:58:03 INFO server.AbstractConnector: Started SelectChannelConnector@0.0.0.0:404015/11/30 16:58:03 [stderr-redir-1]: INFO client.SparkClientImpl: 15/11/30 16:58:03 INFO util.Utils: Successfully started service 'SparkUI' on port 4040.15/11/30 16:58:03 [stderr-redir-1]: INFO client.SparkClientImpl: 15/11/30 16:58:03 INFO ui.SparkUI: Started SparkUI at http://50.140.197.217:404015/11/30 16:58:03 [stderr-redir-1]: INFO client.SparkClientImpl: 15/11/30 16:58:03 INFO spark.SparkContext: Added JAR file:/usr/lib/hive/lib/hive-exec-1.2.1.jar at 
http://50.140.197.217:39801/jars/hive-exec-1.2.1.jar with timestamp 144890268329615/11/30 16:58:03 [stderr-redir-1]: INFO client.SparkClientImpl: 15/11/30 16:58:03 WARN metrics.MetricsSystem: Using default name DAGScheduler for source because spark.app.id is not set.15/11/30 16:58:03 [stderr-redir-1]: INFO client.SparkClientImpl: 15/11/30 16:58:03 INFO client.AppClient$ClientEndpoint: Connecting to master spark://rhes564:7077...15/11/30 16:58:03 [stderr-redir-1]: INFO client.SparkClientImpl: 15/11/30 16:58:03 INFO cluster.SparkDeploySchedulerBackend: Connected to Spark cluster with app ID app-20151130165803-000115/11/30 16:58:03 [stderr-redir-1]: INFO client.SparkClientImpl: 15/11/30 16:58:03 INFO util.Utils: Successfully started service 'org.apache.spark.network.netty.NettyBlockTransferService' on port 24395.15/11/30 16:58:03 [stderr-redir-1]: INFO client.SparkClientImpl: 15/11/30 16:58:03 INFO netty.NettyBlockTransferService: Server created on 2439515/11/30 16:58:03 [stderr-redir-1]: INFO client.SparkClientImpl: 15/11/30 16:58:03 INFO storage.BlockManagerMaster: Trying to register BlockManager15/11/30 16:58:03 [stderr-redir-1]: INFO client.SparkClientImpl: 15/11/30 16:58:03 INFO storage.BlockManagerMasterEndpoint: Registering block manager 50.140.197.217:24395 with 529.9 MB RAM, BlockManagerId(driver, 50.140.197.217, 24395)15/11/30 16:58:03 [stderr-redir-1]: INFO client.SparkClientImpl: 15/11/30 16:58:03 INFO storage.BlockManagerMaster: Registered BlockManager15/11/30 16:58:03 [stderr-redir-1]: INFO client.SparkClientImpl: 15/11/30 16:58:03 INFO scheduler.EventLoggingListener: Logging events to file:/usr/lib/spark/logs/app-20151130165803-000115/11/30 16:58:03 [stderr-redir-1]: INFO client.SparkClientImpl: 15/11/30 16:58:03 INFO cluster.SparkDeploySchedulerBackend: SchedulerBackend is ready for scheduling beginning after reached minRegisteredResourcesRatio: 0.015/11/30 16:58:03 [stderr-redir-1]: INFO client.SparkClientImpl: 15/11/30 16:58:03 INFO spark.SparkContext: Added JAR file:/usr/lib/hive/lib/hive-exec-1.2.1.jar at http://50.140.197.217:39801/jars/hive-exec-1.2.1.jar with timestamp 144890268385215/11/30 16:58:03 [RPC-Handler-3]: DEBUG rpc.KryoMessageCodec: Decoded message of type org.apache.hive.spark.client.rpc.Rpc$MessageHeader (5 bytes)15/11/30 16:58:03 [RPC-Handler-3]: DEBUG rpc.KryoMessageCodec: Decoded message of type org.apache.hive.spark.client.rpc.Rpc$NullMessage (2 bytes)15/11/30 16:58:03 [RPC-Handler-3]: DEBUG rpc.RpcDispatcher: [ClientProtocol] Received RPC message: type=REPLY id=0 payload=org.apache.hive.spark.client.rpc.Rpc$NullMessage15/11/30 16:58:03 [stderr-redir-1]: INFO client.SparkClientImpl: 15/11/30 16:58:03 INFO rpc.RpcDispatcher: [DriverProtocol] Closing channel due to exception in pipeline (java.lang.NoClassDefFoundError: org/apache/hive/spark/client/Job).15/11/30 16:58:03 [RPC-Handler-3]: DEBUG rpc.KryoMessageCodec: Decoded message of type org.apache.hive.spark.client.rpc.Rpc$MessageHeader (5 bytes)15/11/30 16:58:03 [RPC-Handler-3]: DEBUG rpc.KryoMessageCodec: Decoded message of type java.lang.String (3901 bytes)15/11/30 16:58:03 [RPC-Handler-3]: DEBUG rpc.RpcDispatcher: [ClientProtocol] Received RPC message: type=ERROR id=1 payload=java.lang.String15/11/30 16:58:03 [RPC-Handler-3]: WARN rpc.RpcDispatcher: Received error message:io.netty.handler.codec.DecoderException: java.lang.NoClassDefFoundError: org/apache/hive/spark/client/Job
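The failure at the end of this log is a classpath problem on the remote side rather than a query problem: the driver process launched by spark-submit cannot load org.apache.hive.spark.client.Job while decoding the job request, even though hive-exec-1.2.1.jar was submitted as the application jar. A reasonable first diagnostic step is to confirm the class is actually present in the jar being shipped; a minimal sketch follows (JarClassCheck is an illustrative name, not from this log; the default jar path is taken from the spark-submit line above):

    import java.util.jar.JarFile
    import scala.jdk.CollectionConverters._

    object JarClassCheck extends App {
      // Jar to inspect: first CLI argument, or the application jar from the log.
      val jarPath   = args.headOption.getOrElse("/usr/lib/hive/lib/hive-exec-1.2.1.jar")
      // Class the remote driver failed to load, in jar-entry form.
      val className = "org/apache/hive/spark/client/Job.class"
      val jar = new JarFile(jarPath)
      try {
        val found = jar.entries().asScala.exists(_.getName == className)
        println(s"$className ${if (found) "is present in" else "is MISSING from"} $jarPath")
      } finally jar.close()
    }

If the class is present in the jar, the mismatch is more likely between the Hive and Spark versions in play (here Hive 1.2.1 against Spark 1.5.2), with the remote driver resolving the class from a different copy on its own classpath.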
  6. Trying to create feeder causes java.lang.NoSuchMethodError

    Google Groups | 6 months ago | Jonathan Vaughn
    java.lang.NoSuchMethodError: com.fasterxml.jackson.databind.ObjectMapper.readerFor(Ljava/lang/Class;)Lcom/fasterxml/jackson/databind/ObjectReader; means "the JVM could find the ObjectMapper class but it's missing the readerFor <https://github.com/FasterXML/jackson-databind/blob/master/src/main/java/com/fasterxml/jackson/databind/ObjectMapper.java#L3351-L3360> method". You've done something (that's for you to figure out) that causes your project to ship a jackson-databind version that's older than the one expected by Gatling 2.2.2.

    Stéphane Landelle, GatlingCorp CEO (slan...@gatling.io)

    On Thu, Aug 11, 2016 at 3:41 AM, Jonathan Vaughn <jona...@creatuity.com> wrote:

    This is in Gatling 2.2.2. I have a simple CSV I'm trying to use in the user-files/data directory, in the format of:

        loginEmail,loginPassword
        "test+s...@example.com","password"
        "test+s...@example.com","password2"
        ... (etc)

    Trying to create the feeder gives an error. Code:

        package example

        import io.gatling.core.Predef._
        import io.gatling.http.Predef._
        import io.gatling.jdbc.Predef._
        import io.gatling.http.HeaderNames._
        import io.gatling.http.check.HttpCheck
        import scala.concurrent.duration._
        import util.Random
        import io.gatling.jsonpath.JsonPath

        object Login {
          val loginFeeder = csv("customer_accounts.csv")
          ...

    Removing / commenting the csv line (and anything referencing it) prevents the error, though I then have to hard-code the values it's meant to replace for the script to actually work. If I put the file somewhere else or otherwise make the path to the file wrong, I get a corresponding error about the CSV file not being found, so it at least gets as far as trying to read the file. Nothing in the CSV file is outside the range of [0-9a-zA-Z] except for the '@' in the email field, and the commas and quotation marks formatting the CSV file itself. This is the error I get:

        Exception in thread "main" java.lang.NoSuchMethodError: com.fasterxml.jackson.databind.ObjectMapper.readerFor(Ljava/lang/Class;)Lcom/fasterxml/jackson/databind/ObjectReader;
            at io.gatling.core.feeder.SeparatedValuesParser$.stream(SeparatedValuesParser.scala:46)
            at io.gatling.core.feeder.SeparatedValuesParser$.io$gatling$core$feeder$SeparatedValuesParser$$$anonfun$1(SeparatedValuesParser.scala:38)
            at io.gatling.core.feeder.SeparatedValuesParser$lambda$$parse$1.apply(SeparatedValuesParser.scala:37)
            at io.gatling.core.feeder.SeparatedValuesParser$lambda$$parse$1.apply(SeparatedValuesParser.scala:37)
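
Both reports above reduce to classpath problems, at different granularities: the NoClassDefFoundError in the Hive-on-Spark log means the JVM decoding the RPC never had org/apache/hive/spark/client/Job on its classpath at all, while the Gatling NoSuchMethodError means ObjectMapper loaded, but from a jackson-databind jar too old to contain readerFor (the method was only added in jackson-databind 2.6). A minimal diagnostic sketch, using only standard JDK calls, that prints which jar actually supplied a class at runtime; the class probed below is one example, and you would substitute whatever class the error names:

    // Prints where the running JVM loaded a class from. An outdated
    // jackson-databind location here would explain a NoSuchMethodError on
    // ObjectMapper.readerFor; a ClassNotFoundException from forName is the
    // sibling of the NoClassDefFoundError case (class absent entirely).
    public class WhichJar {
        public static void main(String[] args) throws ClassNotFoundException {
            Class<?> c = Class.forName("com.fasterxml.jackson.databind.ObjectMapper");
            java.security.CodeSource src = c.getProtectionDomain().getCodeSource();
            // CodeSource is null for classes loaded from the bootstrap classpath.
            System.out.println(src == null ? "bootstrap classpath" : src.getLocation());
        }
    }

The answer is only meaningful when this runs on the same classpath as the failing tool, e.g. launched with the same -cp (or inside the same launcher) that Gatling or the Hive Spark client uses.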


Root Cause Analysis

  1. com.fasterxml.jackson.core.JsonParseException

    Unrecognized token 'payload': was expecting ('true', 'false' or 'null')
    at [Source: the URL-encoded "payload=%7B%22zen%22..." GitHub webhook body, reproduced in full at the top of this report; line: 1, column: 8]
    (A minimal handling sketch follows the trace.)

    at com.fasterxml.jackson.core.JsonParser._constructError()
  2. Jackson-core
    ReaderBasedJsonParser.nextToken
    1. com.fasterxml.jackson.core.JsonParser._constructError(JsonParser.java:1702)
    2. com.fasterxml.jackson.core.base.ParserMinimalBase._reportError(ParserMinimalBase.java:558)
    3. com.fasterxml.jackson.core.json.ReaderBasedJsonParser._reportInvalidToken(ReaderBasedJsonParser.java:2836)
    4. com.fasterxml.jackson.core.json.ReaderBasedJsonParser._handleOddValue(ReaderBasedJsonParser.java:1899)
    5. com.fasterxml.jackson.core.json.ReaderBasedJsonParser.nextToken(ReaderBasedJsonParser.java:749)
    5 frames
  3. Jackson
    ObjectMapper.readValue
    1. com.fasterxml.jackson.databind.ObjectMapper._initForReading(ObjectMapper.java:3825)
    2. com.fasterxml.jackson.databind.ObjectMapper._readMapAndClose(ObjectMapper.java:3774)
    3. com.fasterxml.jackson.databind.ObjectMapper.readValue(ObjectMapper.java:2833)
    3 frames
  4. com.heroku.devcenter
    GithubWebhookController.greeting
    1. com.heroku.devcenter.GithubWebhookController.greeting(GithubWebhookController.java:26)
    1 frame
  5. Java RT
    Method.invoke
    1. sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    2. sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    3. sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    4. java.lang.reflect.Method.invoke(Method.java:498)
    4 frames
  6. Spring
    InvocableHandlerMethod.invokeForRequest
    1. org.springframework.web.method.support.InvocableHandlerMethod.doInvoke(InvocableHandlerMethod.java:220)
    2. org.springframework.web.method.support.InvocableHandlerMethod.invokeForRequest(InvocableHandlerMethod.java:134)
    2 frames
  7. Spring MVC
    FrameworkServlet.doPost
    1. org.springframework.web.servlet.mvc.method.annotation.ServletInvocableHandlerMethod.invokeAndHandle(ServletInvocableHandlerMethod.java:116)
    2. org.springframework.web.servlet.mvc.method.annotation.RequestMappingHandlerAdapter.invokeHandlerMethod(RequestMappingHandlerAdapter.java:827)
    3. org.springframework.web.servlet.mvc.method.annotation.RequestMappingHandlerAdapter.handleInternal(RequestMappingHandlerAdapter.java:738)
    4. org.springframework.web.servlet.mvc.method.AbstractHandlerMethodAdapter.handle(AbstractHandlerMethodAdapter.java:85)
    5. org.springframework.web.servlet.DispatcherServlet.doDispatch(DispatcherServlet.java:963)
    6. org.springframework.web.servlet.DispatcherServlet.doService(DispatcherServlet.java:897)
    7. org.springframework.web.servlet.FrameworkServlet.processRequest(FrameworkServlet.java:970)
    8. org.springframework.web.servlet.FrameworkServlet.doPost(FrameworkServlet.java:872)
    8 frames
  8. JavaServlet
    HttpServlet.service
    1. javax.servlet.http.HttpServlet.service(HttpServlet.java:646)
    1 frame
  9. Spring MVC
    FrameworkServlet.service
    1. org.springframework.web.servlet.FrameworkServlet.service(FrameworkServlet.java:846)
    1 frame
  10. JavaServlet
    HttpServlet.service
    1. javax.servlet.http.HttpServlet.service(HttpServlet.java:727)
    1 frame
  11. Tomcat (Catalina)
    CoyoteAdapter.service
    1. org.apache.catalina.core.ApplicationFilterChain.internalDoFilter(ApplicationFilterChain.java:303)
    2. org.apache.catalina.core.ApplicationFilterChain.doFilter(ApplicationFilterChain.java:208)
    3. org.apache.catalina.core.StandardWrapperValve.invoke(StandardWrapperValve.java:220)
    4. org.apache.catalina.core.StandardContextValve.invoke(StandardContextValve.java:122)
    5. org.apache.catalina.authenticator.AuthenticatorBase.invoke(AuthenticatorBase.java:503)
    6. org.apache.catalina.core.StandardHostValve.invoke(StandardHostValve.java:170)
    7. org.apache.catalina.valves.ErrorReportValve.invoke(ErrorReportValve.java:103)
    8. org.apache.catalina.core.StandardEngineValve.invoke(StandardEngineValve.java:116)
    9. org.apache.catalina.connector.CoyoteAdapter.service(CoyoteAdapter.java:421)
    9 frames
  12. Tomcat (Coyote HTTP)
    NioEndpoint$SocketProcessor.run
    1. org.apache.coyote.http11.AbstractHttp11Processor.process(AbstractHttp11Processor.java:1070)
    2. org.apache.coyote.AbstractProtocol$AbstractConnectionHandler.process(AbstractProtocol.java:611)
    3. org.apache.tomcat.util.net.NioEndpoint$SocketProcessor.doRun(NioEndpoint.java:1736)
    4. org.apache.tomcat.util.net.NioEndpoint$SocketProcessor.run(NioEndpoint.java:1695)
    4 frames
  13. Java RT
    ThreadPoolExecutor$Worker.run
    1. java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
    2. java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
    2 frames
  14. Tomcat Util
    TaskThread$WrappingRunnable.run
    1. org.apache.tomcat.util.threads.TaskThread$WrappingRunnable.run(TaskThread.java:61)
    1 frame
  15. Java RT
    Thread.run
    1. java.lang.Thread.run(Thread.java:745)
    1 frame
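
Reading the trace bottom-up: Tomcat accepts the POST, Spring MVC routes it to GithubWebhookController.greeting, and that method hands the raw request body to ObjectMapper.readValue, which trips over the literal token 'payload'. GitHub delivers webhooks configured with content type application/x-www-form-urlencoded as a form body of the shape payload=<url-encoded JSON>, so the raw body is not JSON until the payload field is extracted and decoded. A minimal sketch of one fix, assuming a Spring MVC controller; the class name and URL path below are hypothetical, since only GithubWebhookController.greeting is attested by the trace:

    import java.io.IOException;
    import com.fasterxml.jackson.databind.JsonNode;
    import com.fasterxml.jackson.databind.ObjectMapper;
    import org.springframework.web.bind.annotation.RequestMapping;
    import org.springframework.web.bind.annotation.RequestMethod;
    import org.springframework.web.bind.annotation.RequestParam;
    import org.springframework.web.bind.annotation.RestController;

    @RestController
    public class GithubWebhookSketch { // hypothetical stand-in for the real controller

        private static final ObjectMapper MAPPER = new ObjectMapper();

        // For application/x-www-form-urlencoded requests the container parses
        // and URL-decodes the form fields, so the "payload" parameter arrives
        // here as plain JSON text that Jackson can read.
        @RequestMapping(value = "/webhooks/github", method = RequestMethod.POST)
        public String greeting(@RequestParam("payload") String payload) throws IOException {
            JsonNode event = MAPPER.readTree(payload);
            return "zen: " + event.path("zen").asText();
        }
    }

The other direction also works: reconfigure the webhook on the GitHub side to send application/json, in which case the raw body itself is valid JSON and a @RequestBody-style handler can parse it directly.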